lsst.obs.base  14.0-27-g0dd1869
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 from builtins import str
24 import copy
25 import os
26 import pyfits # required by _makeDefectsDict until defects are written as AFW tables
27 import re
28 import weakref
29 import lsst.daf.persistence as dafPersist
30 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.daf.base as dafBase
32 import lsst.afw.geom as afwGeom
33 import lsst.afw.image as afwImage
34 import lsst.afw.table as afwTable
35 from lsst.afw.fits import readMetadata
36 import lsst.afw.cameraGeom as afwCameraGeom
37 import lsst.log as lsstLog
38 import lsst.pex.policy as pexPolicy
39 import lsst.pex.exceptions as pexExcept
40 from .exposureIdInfo import ExposureIdInfo
41 from .makeRawVisitInfo import MakeRawVisitInfo
42 from lsst.utils import getPackageDir
43 
44 __all__ = ["CameraMapper", "exposureFromImage"]
45 
46 
47 class CameraMapper(dafPersist.Mapper):
48 
49  """CameraMapper is a base class for mappers that handle images from a
50  camera and products derived from them. This provides an abstraction layer
51  between the data on disk and the code.
52 
53  Public methods: keys, queryMetadata, getDatasetTypes, map,
54  canStandardize, standardize
55 
56  Mappers for specific data sources (e.g., CFHT Megacam, LSST
57  simulations, etc.) should inherit this class.
58 
59  The CameraMapper manages datasets within a "root" directory. Note that
60  writing to a dataset present in the input root will hide the existing
61  dataset but not overwrite it. See #2160 for design discussion.
62 
63  A camera is assumed to consist of one or more rafts, each composed of
64  multiple CCDs. Each CCD is in turn composed of one or more amplifiers
65  (amps). A camera is also assumed to have a camera geometry description
66  (CameraGeom object) as a policy file, a filter description (Filter class
67  static configuration) as another policy file, and an optional defects
68  description directory.
69 
70  Information from the camera geometry and defects are inserted into all
71  Exposure objects returned.
72 
73  The mapper uses one or two registries to retrieve metadata about the
74  images. The first is a registry of all raw exposures. This must contain
75  the time of the observation. One or more tables (or the equivalent)
76  within the registry are used to look up data identifier components that
77  are not specified by the user (e.g. filter) and to return results for
78  metadata queries. The second is an optional registry of all calibration
79  data. This should contain validity start and end entries for each
80  calibration dataset in the same timescale as the observation time.
81 
82  Subclasses will typically set MakeRawVisitInfoClass:
83 
84  MakeRawVisitInfoClass: a class variable that points to a subclass of
85  MakeRawVisitInfo, a functor that creates an
86  lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
87 
88  Subclasses must provide the following methods:
89 
90  _extractDetectorName(self, dataId): returns the detector name for a CCD
91  (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
92  a dataset identifier referring to that CCD or a subcomponent of it.
93 
94  _computeCcdExposureId(self, dataId): see below
95 
96  _computeCoaddExposureId(self, dataId, singleFilter): see below
97 
98  Subclasses may also need to override the following methods:
99 
100  _transformId(self, dataId): transformation of a data identifier
101  from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"),
102  including making suitable for path expansion (e.g. removing commas).
103  The default implementation does nothing. Note that this
104  method should not modify its input parameter.
105 
106  getShortCcdName(self, ccdName): a static method that returns a shortened name
107  suitable for use as a filename. The default version converts spaces to underscores.
108 
109  _getCcdKeyVal(self, dataId): return a CCD key and value
110  by which to look up defects in the defects registry.
111  The default value returns ("ccd", detector name)
112 
113  _mapActualToPath(self, template, actualId): convert a template path to an
114  actual path, using the actual dataset identifier.
115 
116  The mapper's behaviors are largely specified by the policy file.
117  See the MapperDictionary.paf for descriptions of the available items.
118 
119  The 'exposures', 'calibrations', and 'datasets' subpolicies configure
120  mappings (see Mappings class).
121 
122  Common default mappings for all subclasses can be specified in the
123  "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides
124  a simple way to add a product to all camera mappers.
125 
126  Functions to map (provide a path to the data given a dataset
127  identifier dictionary) and standardize (convert data into some standard
128  format or type) may be provided in the subclass as "map_{dataset type}"
129  and "std_{dataset type}", respectively.
130 
131  If non-Exposure datasets cannot be retrieved using standard
132  daf_persistence methods alone, a "bypass_{dataset type}" function may be
133  provided in the subclass to return the dataset instead of using the
134  "datasets" subpolicy.
135 
136  Implementations of map_camera and bypass_camera that should typically be
137  sufficient are provided in this base class.
138 
139  Notes
140  -----
141  TODO:
142 
 143  - Handle defects the same way as all other calibration products, using the calibration registry
144  - Instead of auto-loading the camera at construction time, load it from the calibration registry
145  - Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention
146  of pyfits from this package.
147  """
    # Name of the python package providing the concrete mapper; subclasses
    # must set this (it is checked at construction time and by
    # getPackageName()).
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
156 
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Prefer an explicit root; fall back to the repository config's root.
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        # Accept the deprecated pex_policy type for backward compatibility.
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        # Per-repository policy settings override the merged per-camera
        # defaults passed in.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Merge in obs_base's dictionary of defaults for any keys the
        # camera-specific policy did not set.
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels: named key hierarchies consumed by getKeys()/getDefaultLevel().
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): the `provided` constructor argument is not forwarded
        # here (provided=None is passed instead) -- confirm this is intended.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # Skytile policy
        self.skypolicy = policy['skytiles']

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
297 
299 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there are
        methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: each category pairs a dictionary of defaults with the
        # Mapping subclass that implements it.
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                # Per-category dict of Mapping objects, also exposed as an
                # attribute (self.images, self.exposures, ...).
                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets get a map_ method returning a
                    # ButlerComposite; no other handlers are installed for
                    # them.  subPolicy is bound as a default argument so each
                    # closure keeps its own policy (avoids late binding).
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping

                    # Install default map_/query_/std_ handlers unless the
                    # subclass already defines them; `mapping` is bound as a
                    # default argument in each closure (avoids late binding).
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        # Existing subclass-defined methods always win over
                        # the generated defaults.
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            # Map the full image, then attach the bbox from
                            # the dataId as additional data for the reader.
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            # Queries ignore the bbox component entirely.
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
508 
509  def _computeCcdExposureId(self, dataId):
510  """Compute the 64-bit (long) identifier for a CCD exposure.
511 
512  Subclasses must override
513 
514  Parameters
515  ----------
516  dataId : `dict`
517  Data identifier with visit, ccd.
518  """
519  raise NotImplementedError()
520 
521  def _computeCoaddExposureId(self, dataId, singleFilter):
522  """Compute the 64-bit (long) identifier for a coadd.
523 
524  Subclasses must override
525 
526  Parameters
527  ----------
528  dataId : `dict`
529  Data identifier with tract and patch.
530  singleFilter : `bool`
531  True means the desired ID is for a single-filter coadd, in which
532  case dataIdmust contain filter.
533  """
534  raise NotImplementedError()
535 
536  def _search(self, path):
537  """Search for path in the associated repository's storage.
538 
539  Parameters
540  ----------
541  path : string
542  Path that describes an object in the repository associated with
543  this mapper.
544  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
545  indicator will be stripped when searching and so will match
546  filenames without the HDU indicator, e.g. 'foo.fits'. The path
547  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
548 
549  Returns
550  -------
551  string
552  The path for this object in the repository. Will return None if the
553  object can't be found. If the input argument path contained an HDU
554  indicator, the returned path will also contain the HDU indicator.
555  """
556  return self.rootStorage.search(path)
557 
558  def backup(self, datasetType, dataId):
559  """Rename any existing object with the given type and dataId.
560 
561  The CameraMapper implementation saves objects in a sequence of e.g.:
562 
563  - foo.fits
564  - foo.fits~1
565  - foo.fits~2
566 
567  All of the backups will be placed in the output repo, however, and will
568  not be removed if they are found elsewhere in the _parent chain. This
569  means that the same file will be stored twice if the previous version was
570  found in an input repo.
571  """
572 
573  # Calling PosixStorage directly is not the long term solution in this
574  # function, this is work-in-progress on epic DM-6225. The plan is for
575  # parentSearch to be changed to 'search', and search only the storage
576  # associated with this mapper. All searching of parents will be handled
577  # by traversing the container of repositories in Butler.
578 
579  def firstElement(list):
580  """Get the first element in the list, or None if that can't be done.
581  """
582  return list[0] if list is not None and len(list) else None
583 
584  n = 0
585  newLocation = self.map(datasetType, dataId, write=True)
586  newPath = newLocation.getLocations()[0]
587  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
588  path = firstElement(path)
589  oldPaths = []
590  while path is not None:
591  n += 1
592  oldPaths.append((n, path))
593  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
594  path = firstElement(path)
595  for n, oldPath in reversed(oldPaths):
596  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
597 
598  def keys(self):
599  """Return supported keys.
600 
601  Returns
602  -------
603  iterable
604  List of keys usable in a dataset identifier
605  """
606  return iter(self.keyDict.keys())
607 
608  def getKeys(self, datasetType, level):
609  """Return a dict of supported keys and their value types for a given dataset
610  type at a given level of the key hierarchy.
611 
612  Parameters
613  ----------
614  datasetType : `str`
615  Dataset type or None for all dataset types.
616  level : `str` or None
617  Level or None for all levels or '' for the default level for the
618  camera.
619 
620  Returns
621  -------
622  `dict`
623  Keys are strings usable in a dataset identifier, values are their
624  value types.
625  """
626 
627  # not sure if this is how we want to do this. what if None was intended?
628  if level == '':
629  level = self.getDefaultLevel()
630 
631  if datasetType is None:
632  keyDict = copy.copy(self.keyDict)
633  else:
634  keyDict = self.mappings[datasetType].keys()
635  if level is not None and level in self.levels:
636  keyDict = copy.copy(keyDict)
637  for l in self.levels[level]:
638  if l in keyDict:
639  del keyDict[l]
640  return keyDict
641 
642  def getDefaultLevel(self):
643  return self.defaultLevel
644 
645  def getDefaultSubLevel(self, level):
646  if level in self.defaultSubLevels:
647  return self.defaultSubLevels[level]
648  return None
649 
650  @classmethod
651  def getCameraName(cls):
652  """Return the name of the camera that this CameraMapper is for."""
653  className = str(cls)
654  className = className[className.find('.'):-1]
655  m = re.search(r'(\w+)Mapper', className)
656  if m is None:
657  m = re.search(r"class '[\w.]*?(\w+)'", className)
658  name = m.group(1)
659  return name[:1].lower() + name[1:] if name else ''
660 
661  @classmethod
662  def getPackageName(cls):
663  """Return the name of the package containing this CameraMapper."""
664  if cls.packageName is None:
665  raise ValueError('class variable packageName must not be None')
666  return cls.packageName
667 
668  @classmethod
669  def getPackageDir(cls):
670  """Return the base directory of this package"""
671  return getPackageDir(cls.getPackageName())
672 
673  def map_camera(self, dataId, write=False):
674  """Map a camera dataset."""
675  if self.camera is None:
676  raise RuntimeError("No camera dataset available.")
677  actualId = self._transformId(dataId)
678  return dafPersist.ButlerLocation(
679  pythonType="lsst.afw.cameraGeom.CameraConfig",
680  cppType="Config",
681  storageName="ConfigStorage",
682  locationList=self.cameraDataLocation or "ignored",
683  dataId=actualId,
684  mapper=self,
685  storage=self.rootStorage
686  )
687 
688  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
689  """Return the (preloaded) camera object.
690  """
691  if self.camera is None:
692  raise RuntimeError("No camera dataset available.")
693  return self.camera
694 
695  def map_defects(self, dataId, write=False):
696  """Map defects dataset.
697 
698  Returns
699  -------
700  `lsst.daf.butler.ButlerLocation`
701  Minimal ButlerLocation containing just the locationList field
702  (just enough information that bypass_defects can use it).
703  """
704  defectFitsPath = self._defectLookup(dataId=dataId)
705  if defectFitsPath is None:
706  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
707 
708  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
709  dataId, self,
710  storage=self.rootStorage)
711 
712  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
713  """Return a defect based on the butler location returned by map_defects
714 
715  Parameters
716  ----------
717  butlerLocation : `lsst.daf.persistence.ButlerLocation`
718  locationList = path to defects FITS file
719  dataId : `dict`
720  Butler data ID; "ccd" must be set.
721 
722  Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation
723  into an object, which is what we want for now, since that conversion is a bit tricky.
724  """
725  detectorName = self._extractDetectorName(dataId)
726  defectsFitsPath = butlerLocation.locationList[0]
727  with pyfits.open(defectsFitsPath) as hduList:
728  for hdu in hduList[1:]:
729  if hdu.header["name"] != detectorName:
730  continue
731 
732  defectList = []
733  for data in hdu.data:
734  bbox = afwGeom.Box2I(
735  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
736  afwGeom.Extent2I(int(data['width']), int(data['height'])),
737  )
738  defectList.append(afwImage.DefectBase(bbox))
739  return defectList
740 
741  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
742 
743  def map_expIdInfo(self, dataId, write=False):
744  return dafPersist.ButlerLocation(
745  pythonType="lsst.obs.base.ExposureIdInfo",
746  cppType=None,
747  storageName="Internal",
748  locationList="ignored",
749  dataId=dataId,
750  mapper=self,
751  storage=self.rootStorage
752  )
753 
754  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
755  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
756  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
757  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
758  return ExposureIdInfo(expId=expId, expBits=expBits)
759 
760  def std_bfKernel(self, item, dataId):
761  """Disable standardization for bfKernel
762 
763  bfKernel is a calibration product that is numpy array,
764  unlike other calibration products that are all images;
765  all calibration images are sent through _standardizeExposure
766  due to CalibrationMapping, but we don't want that to happen to bfKernel
767  """
768  return item
769 
770  def std_raw(self, item, dataId):
771  """Standardize a raw dataset by converting it to an Exposure instead of an Image"""
772  return self._standardizeExposure(self.exposures['raw'], item, dataId,
773  trimmed=False, setVisitInfo=True)
774 
775  def map_skypolicy(self, dataId):
776  """Map a sky policy."""
777  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
778  "Internal", None, None, self,
779  storage=self.rootStorage)
780 
781  def std_skypolicy(self, item, dataId):
782  """Standardize a sky policy by returning the one we use."""
783  return self.skypolicy
784 
785 
790 
791  def _getCcdKeyVal(self, dataId):
792  """Return CCD key and value used to look a defect in the defect registry
793 
794  The default implementation simply returns ("ccd", full detector name)
795  """
796  return ("ccd", self._extractDetectorName(dataId))
797 
798  def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
799  posixIfNoSql=True):
800  """Set up a registry (usually SQLite3), trying a number of possible
801  paths.
802 
803  Parameters
804  ----------
805  name : string
806  Name of registry.
807  description: `str`
808  Description of registry (for log messages)
809  path : string
810  Path for registry.
811  policy : string
812  Policy that contains the registry name, used if path is None.
813  policyKey : string
814  Key in policy for registry path.
815  storage : Storage subclass
816  Repository Storage to look in.
817  searchParents : bool, optional
818  True if the search for a registry should follow any Butler v1
819  _parent symlinks.
820  posixIfNoSql : bool, optional
821  If an sqlite registry is not found, will create a posix registry if
822  this is True.
823 
824  Returns
825  -------
826  lsst.daf.persistence.Registry
827  Registry object
828  """
829  if path is None and policyKey in policy:
830  path = dafPersist.LogicalLocation(policy[policyKey]).locString()
831  if os.path.isabs(path):
832  raise RuntimeError("Policy should not indicate an absolute path for registry.")
833  if not storage.exists(path):
834  newPath = storage.instanceSearch(path)
835 
836  newPath = newPath[0] if newPath is not None and len(newPath) else None
837  if newPath is None:
838  self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
839  path)
840  path = newPath
841  else:
842  self.log.warn("Unable to locate registry at policy path: %s", path)
843  path = None
844 
845  # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
846  # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
847  # root from path. Currently only works with PosixStorage
848  try:
849  root = storage.root
850  if path and (path.startswith(root)):
851  path = path[len(root + '/'):]
852  except AttributeError:
853  pass
854 
855  # determine if there is an sqlite registry and if not, try the posix registry.
856  registry = None
857 
858  def search(filename, description):
859  """Search for file in storage
860 
861  Parameters
862  ----------
863  filename : `str`
864  Filename to search for
865  description : `str`
866  Description of file, for error message.
867 
868  Returns
869  -------
870  path : `str` or `None`
871  Path to file, or None
872  """
873  result = storage.instanceSearch(filename)
874  if result:
875  return result[0]
876  self.log.debug("Unable to locate %s: %s", description, filename)
877  return None
878 
879  # Search for a suitable registry database
880  if path is None:
881  path = search("%s.pgsql" % name, "%s in root" % description)
882  if path is None:
883  path = search("%s.sqlite3" % name, "%s in root" % description)
884  if path is None:
885  path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
886 
887  if path is not None:
888  if not storage.exists(path):
889  newPath = storage.instanceSearch(path)
890  newPath = newPath[0] if newPath is not None and len(newPath) else None
891  if newPath is not None:
892  path = newPath
893  localFileObj = storage.getLocalFile(path)
894  self.log.info("Loading %s registry from %s", description, localFileObj.name)
895  registry = dafPersist.Registry.create(localFileObj.name)
896  localFileObj.close()
897  elif not registry and posixIfNoSql:
898  try:
899  self.log.info("Loading Posix %s registry from %s", description, storage.root)
900  registry = dafPersist.PosixRegistry(storage.root)
901  except:
902  registry = None
903 
904  return registry
905 
906  def _transformId(self, dataId):
907  """Generate a standard ID dict from a camera-specific ID dict.
908 
909  Canonical keys include:
910  - amp: amplifier name
911  - ccd: CCD name (in LSST this is a combination of raft and sensor)
912  The default implementation returns a copy of its input.
913 
914  Parameters
915  ----------
916  dataId : `dict`
917  Dataset identifier; this must not be modified
918 
919  Returns
920  -------
921  `dict`
922  Transformed dataset identifier.
923  """
924 
925  return dataId.copy()
926 
927  def _mapActualToPath(self, template, actualId):
928  """Convert a template path to an actual path, using the actual data
929  identifier. This implementation is usually sufficient but can be
930  overridden by the subclass.
931 
932  Parameters
933  ----------
934  template : `str`
935  Template path
936  actualId : `dict`
937  Dataset identifier
938 
939  Returns
940  -------
941  `str`
942  Pathname
943  """
944 
945  try:
946  transformedId = self._transformId(actualId)
947  return template % transformedId
948  except Exception as e:
949  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
950 
951  @staticmethod
952  def getShortCcdName(ccdName):
953  """Convert a CCD name to a form useful as a filename
954 
955  The default implementation converts spaces to underscores.
956  """
957  return ccdName.replace(" ", "_")
958 
959  def _extractDetectorName(self, dataId):
960  """Extract the detector (CCD) name from the dataset identifier.
961 
962  The name in question is the detector name used by lsst.afw.cameraGeom.
963 
964  Parameters
965  ----------
966  dataId : `dict`
967  Dataset identifier.
968 
969  Returns
970  -------
971  `str`
972  Detector name
973  """
974  raise NotImplementedError("No _extractDetectorName() function specified")
975 
976  def _extractAmpId(self, dataId):
977  """Extract the amplifier identifer from a dataset identifier.
978 
979  .. note:: Deprecated in 11_0
980 
981  amplifier identifier has two parts: the detector name for the CCD
982  containing the amplifier and index of the amplifier in the detector.
983 
984  Parameters
985  ----------
986  dataId : `dict`
987  Dataset identifer
988 
989  Returns
990  -------
991  `tuple`
992  Amplifier identifier
993  """
994 
995  trDataId = self._transformId(dataId)
996  return (trDataId["ccd"], int(trDataId['amp']))
997 
998  def _setAmpDetector(self, item, dataId, trimmed=True):
999  """Set the detector object in an Exposure for an amplifier.
1000 
1001  Defects are also added to the Exposure based on the detector object.
1002 
1003  Parameters
1004  ----------
1005  item : `lsst.afw.image.Exposure`
1006  Exposure to set the detector in.
1007  dataId : `dict`
1008  Dataset identifier
1009  trimmed : `bool`
1010  Should detector be marked as trimmed? (ignored)
1011  """
1012 
1013  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1014 
1015  def _setCcdDetector(self, item, dataId, trimmed=True):
1016  """Set the detector object in an Exposure for a CCD.
1017 
1018  Parameters
1019  ----------
1020  item : `lsst.afw.image.Exposure`
1021  Exposure to set the detector in.
1022  dataId : `dict`
1023  Dataset identifier
1024  trimmed : `bool`
1025  Should detector be marked as trimmed? (ignored)
1026  """
1027  if item.getDetector() is not None:
1028  return
1029 
1030  detectorName = self._extractDetectorName(dataId)
1031  detector = self.camera[detectorName]
1032  item.setDetector(detector)
1033 
1034  def _setFilter(self, mapping, item, dataId):
1035  """Set the filter object in an Exposure. If the Exposure had a FILTER
1036  keyword, this was already processed during load. But if it didn't,
1037  use the filter from the registry.
1038 
1039  Parameters
1040  ----------
1041  mapping : `lsst.obs.base.Mapping`
1042  Where to get the filter from.
1043  item : `lsst.afw.image.Exposure`
1044  Exposure to set the filter in.
1045  dataId : `dict`
1046  Dataset identifier.
1047  """
1048 
1049  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
1050  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
1051  return
1052 
1053  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1054  return
1055 
1056  actualId = mapping.need(['filter'], dataId)
1057  filterName = actualId['filter']
1058  if self.filters is not None and filterName in self.filters:
1059  filterName = self.filters[filterName]
1060  item.setFilter(afwImage.Filter(filterName))
1061 
1062  # Default standardization function for exposures
1063  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1064  trimmed=True, setVisitInfo=True):
1065  """Default standardization function for images.
1066 
1067  This sets the Detector from the camera geometry
1068  and optionally set the Fiter. In both cases this saves
1069  having to persist some data in each exposure (or image).
1070 
1071  Parameters
1072  ----------
1073  mapping : `lsst.obs.base.Mapping`
1074  Where to get the values from.
1075  item : image-like object
1076  Can be any of lsst.afw.image.Exposure,
1077  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1078  or lsst.afw.image.MaskedImage
1079 
1080  dataId : `dict`
1081  Dataset identifier
1082  filter : `bool`
1083  Set filter? Ignored if item is already an exposure
1084  trimmed : `bool`
1085  Should detector be marked as trimmed?
1086  setVisitInfo : `bool`
1087  Should Exposure have its VisitInfo filled out from the metadata?
1088 
1089  Returns
1090  -------
1091  `lsst.afw.image.Exposure`
1092  The standardized Exposure.
1093  """
1094  try:
1095  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
1096  except Exception as e:
1097  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
1098  raise
1099 
1100  if mapping.level.lower() == "amp":
1101  self._setAmpDetector(item, dataId, trimmed)
1102  elif mapping.level.lower() == "ccd":
1103  self._setCcdDetector(item, dataId, trimmed)
1104 
1105  if filter:
1106  self._setFilter(mapping, item, dataId)
1107 
1108  return item
1109 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        Looks up the observation time for the visit in the main registry,
        then queries the defect registry for the defect file valid at that
        time for this CCD.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; must contain a 'visit' key.

        Returns
        -------
        `str`
            Path to the defects file or None if not available.

        Raises
        ------
        RuntimeError
            If there is a defect registry but no main registry, or if more
            than one defect file matches.
        """
        # No defect registry at all: defects are simply unavailable.
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') and ('raw_visit') are plain strings, not
        # 1-tuples — confirm registry.lookup accepts bare strings here.
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        # A visit is expected to map to exactly one observation time.
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1150 
1151  def _makeCamera(self, policy, repositoryDir):
1152  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry
1153 
1154  Also set self.cameraDataLocation, if relevant (else it can be left None).
1155 
1156  This implementation assumes that policy contains an entry "camera" that points to the
1157  subdirectory in this package of camera data; specifically, that subdirectory must contain:
1158  - a file named `camera.py` that contains persisted camera config
1159  - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath
1160 
1161  Parameters
1162  ----------
1163  policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy`
1164  Policy with per-camera defaults already merged
1165  (PexPolicy only for backward compatibility).
1166  repositoryDir : `str`
1167  Policy repository for the subclassing module (obtained with
1168  getRepositoryPath() on the per-camera default dictionary).
1169  """
1170  if isinstance(policy, pexPolicy.Policy):
1171  policy = dafPersist.Policy(pexPolicy=policy)
1172  if 'camera' not in policy:
1173  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1174  cameraDataSubdir = policy['camera']
1175  self.cameraDataLocation = os.path.normpath(
1176  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1177  cameraConfig = afwCameraGeom.CameraConfig()
1178  cameraConfig.load(self.cameraDataLocation)
1179  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1180  return afwCameraGeom.makeCameraFromPath(
1181  cameraConfig=cameraConfig,
1182  ampInfoPath=ampInfoPath,
1183  shortNameFunc=self.getShortCcdName,
1184  pupilFactoryClass=self.PupilFactoryClass
1185  )
1186 
1187  def getRegistry(self):
1188  """Get the registry used by this mapper.
1189 
1190  Returns
1191  -------
1192  Registry or None
1193  The registry used by this mapper for this mapper's repository.
1194  """
1195  return self.registry
1196 
1197  def getImageCompressionSettings(self, datasetType, dataId):
1198  """Stuff image compression settings into a daf.base.PropertySet
1199 
1200  This goes into the ButlerLocation's "additionalData", which gets
1201  passed into the boost::persistence framework.
1202 
1203  Parameters
1204  ----------
1205  datasetType : `str`
1206  Type of dataset for which to get the image compression settings.
1207  dataId : `dict`
1208  Dataset identifier.
1209 
1210  Returns
1211  -------
1212  additionalData : `lsst.daf.base.PropertySet`
1213  Image compression settings.
1214  """
1215  mapping = self.mappings[datasetType]
1216  recipeName = mapping.recipe
1217  storageType = mapping.storage
1218  if storageType not in self._writeRecipes:
1219  return dafBase.PropertySet()
1220  if recipeName not in self._writeRecipes[storageType]:
1221  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1222  (datasetType, storageType, recipeName))
1223  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1224  seed = hash(tuple(dataId.items())) % 2**31
1225  for plane in ("image", "mask", "variance"):
1226  if recipe.exists(plane + ".scaling.seed") and recipe.get(plane + ".scaling.seed") == 0:
1227  recipe.set(plane + ".scaling.seed", seed)
1228  return recipe
1229 
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which uses it to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BSCALE (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        # Defaults ship with obs_base; each obs_* package may add its own.
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                # NOTE(review): this compares recipe names under `entry` in the
                # defaults against the TOP-LEVEL names of the supplements
                # policy; supplements[entry].names() looks like the intended
                # operand — confirm against dafPersist.Policy.names() semantics.
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        # Validate each storage type's recipes and cache the PropertySets.
        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1303 
1304 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier; passed to ``mapper._computeCcdExposureId`` when
        building the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to compute the exposure id and make the raw VisitInfo;
        without it the VisitInfo cannot be set (a warning is logged).
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default "CameraMapper" logger is created
        when needed.
    setVisitInfo : `bool`, optional
        If True, attach a VisitInfo built from the metadata when the
        exposure does not already have one.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            # strip=True: presumably removes the WCS keywords from the
            # metadata once consumed — confirm against makeSkyWcs docs.
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("wcs set to None; insufficient information found in metadata to create a valid wcs: "
                        "%s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Exposure
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1363 
1364 
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # The function's `def` line was lost in this listing (line numbers jump
    # 1364 -> 1366); the signature is restored from the call site in
    # _initWriteRecipes (validationMenu) and the module index.
    # Schemas define what should be there, and the default values (and by the default
    # value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Nothing specified: fill in the full set of defaults.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce provided values to the schema's type; fall back
                    # to the schema default when the key is absent.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.