lsst.obs.base  16.0-7-g2664ab2+1
cameraMapper.py
Go to the documentation of this file.
1 #
2 # LSST Data Management System
3 # Copyright 2008, 2009, 2010 LSST Corporation.
4 #
5 # This product includes software developed by the
6 # LSST Project (http://www.lsst.org/).
7 #
8 # This program is free software: you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation, either version 3 of the License, or
11 # (at your option) any later version.
12 #
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
17 #
18 # You should have received a copy of the LSST License Statement and
19 # the GNU General Public License along with this program. If not,
20 # see <http://www.lsstcorp.org/LegalNotices/>.
21 #
22 
23 import copy
24 import os
25 from astropy.io import fits # required by _makeDefectsDict until defects are written as AFW tables
26 import re
27 import weakref
28 import lsst.daf.persistence as dafPersist
29 from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base as dafBase
31 import lsst.afw.geom as afwGeom
32 import lsst.afw.image as afwImage
33 import lsst.afw.table as afwTable
34 from lsst.afw.fits import readMetadata
35 import lsst.afw.cameraGeom as afwCameraGeom
36 import lsst.log as lsstLog
37 import lsst.pex.policy as pexPolicy
38 import lsst.pex.exceptions as pexExcept
39 from .exposureIdInfo import ExposureIdInfo
40 from .makeRawVisitInfo import MakeRawVisitInfo
41 from lsst.utils import getPackageDir
42 
43 __all__ = ["CameraMapper", "exposureFromImage"]
44 
45 
class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file, a filter description (Filter class
    static configuration) as another policy file, and an optional defects
    description directory.

    Information from the camera geometry and defects are inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion (e.g. removing
    commas). The default implementation does nothing. Note that this
    method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _getCcdKeyVal(self, dataId): return a CCD key and value
    by which to look up defects in the defects registry.
    The default value returns ("ccd", detector name)

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    TODO:

    - Handle defects the same way as all other calibration products, using the
      calibration registry
    - Instead of auto-loading the camera at construction time, load it from
      the calibration registry
    - Rewrite defects as AFW tables so we don't need astropy.io.fits to
      unpersist them; then remove all mention of astropy.io.fits from this
      package.
    """

    # Name of the package providing the camera description; subclasses MUST
    # set this (it is checked in __init__ and used by getPackageName/Dir).
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
159 
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        # Prefer an explicit root; fall back to the repository config's root.
        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        # Accept old-style pexPolicy input for backward compatibility.
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        # A policy stored with the repository overrides the one passed in.
        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Merge obs_base's mapper dictionary defaults into the policy.
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        # NOTE(review): the constructor's `provided` argument is NOT forwarded
        # here (provided=None) — looks like it silently discards the caller's
        # keys; confirm whether this is intentional before changing.
        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and the registry is loaded from the
        # camera package, which is on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
297 
299 
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings: (policy section name, per-type default policy, Mapping class)
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        # Composite datasets are assembled from other dataset
                        # types.  subPolicy is bound as a default argument so
                        # the closure captures this iteration's value (Python
                        # closures are otherwise late-binding in loops).
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping

                    # The mapping=mapping default freezes this iteration's
                    # mapping; weakref.proxy(self) avoids a reference cycle
                    # between the mapper and its bound closures.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        # Existing user-defined methods always win over the
                        # generated defaults (hasattr checks below).
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )
                            # hdu=1 skips the primary HDU for exposures
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            # Map the parent image (without 'bbox'), then attach
                            # the requested bounding box as additional data.
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).getScalar("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
510 
511  def _computeCcdExposureId(self, dataId):
512  """Compute the 64-bit (long) identifier for a CCD exposure.
513 
514  Subclasses must override
515 
516  Parameters
517  ----------
518  dataId : `dict`
519  Data identifier with visit, ccd.
520  """
521  raise NotImplementedError()
522 
523  def _computeCoaddExposureId(self, dataId, singleFilter):
524  """Compute the 64-bit (long) identifier for a coadd.
525 
526  Subclasses must override
527 
528  Parameters
529  ----------
530  dataId : `dict`
531  Data identifier with tract and patch.
532  singleFilter : `bool`
533  True means the desired ID is for a single-filter coadd, in which
534  case dataIdmust contain filter.
535  """
536  raise NotImplementedError()
537 
538  def _search(self, path):
539  """Search for path in the associated repository's storage.
540 
541  Parameters
542  ----------
543  path : string
544  Path that describes an object in the repository associated with
545  this mapper.
546  Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
547  indicator will be stripped when searching and so will match
548  filenames without the HDU indicator, e.g. 'foo.fits'. The path
549  returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
550 
551  Returns
552  -------
553  string
554  The path for this object in the repository. Will return None if the
555  object can't be found. If the input argument path contained an HDU
556  indicator, the returned path will also contain the HDU indicator.
557  """
558  return self.rootStorage.search(path)
559 
560  def backup(self, datasetType, dataId):
561  """Rename any existing object with the given type and dataId.
562 
563  The CameraMapper implementation saves objects in a sequence of e.g.:
564 
565  - foo.fits
566  - foo.fits~1
567  - foo.fits~2
568 
569  All of the backups will be placed in the output repo, however, and will
570  not be removed if they are found elsewhere in the _parent chain. This
571  means that the same file will be stored twice if the previous version
572  was found in an input repo.
573  """
574 
575  # Calling PosixStorage directly is not the long term solution in this
576  # function, this is work-in-progress on epic DM-6225. The plan is for
577  # parentSearch to be changed to 'search', and search only the storage
578  # associated with this mapper. All searching of parents will be handled
579  # by traversing the container of repositories in Butler.
580 
581  def firstElement(list):
582  """Get the first element in the list, or None if that can't be
583  done.
584  """
585  return list[0] if list is not None and len(list) else None
586 
587  n = 0
588  newLocation = self.map(datasetType, dataId, write=True)
589  newPath = newLocation.getLocations()[0]
590  path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
591  path = firstElement(path)
592  oldPaths = []
593  while path is not None:
594  n += 1
595  oldPaths.append((n, path))
596  path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
597  path = firstElement(path)
598  for n, oldPath in reversed(oldPaths):
599  self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
600 
601  def keys(self):
602  """Return supported keys.
603 
604  Returns
605  -------
606  iterable
607  List of keys usable in a dataset identifier
608  """
609  return iter(self.keyDict.keys())
610 
611  def getKeys(self, datasetType, level):
612  """Return a dict of supported keys and their value types for a given
613  dataset type at a given level of the key hierarchy.
614 
615  Parameters
616  ----------
617  datasetType : `str`
618  Dataset type or None for all dataset types.
619  level : `str` or None
620  Level or None for all levels or '' for the default level for the
621  camera.
622 
623  Returns
624  -------
625  `dict`
626  Keys are strings usable in a dataset identifier, values are their
627  value types.
628  """
629 
630  # not sure if this is how we want to do this. what if None was intended?
631  if level == '':
632  level = self.getDefaultLevel()
633 
634  if datasetType is None:
635  keyDict = copy.copy(self.keyDict)
636  else:
637  keyDict = self.mappings[datasetType].keys()
638  if level is not None and level in self.levels:
639  keyDict = copy.copy(keyDict)
640  for l in self.levels[level]:
641  if l in keyDict:
642  del keyDict[l]
643  return keyDict
644 
645  def getDefaultLevel(self):
646  return self.defaultLevel
647 
648  def getDefaultSubLevel(self, level):
649  if level in self.defaultSubLevels:
650  return self.defaultSubLevels[level]
651  return None
652 
653  @classmethod
654  def getCameraName(cls):
655  """Return the name of the camera that this CameraMapper is for."""
656  className = str(cls)
657  className = className[className.find('.'):-1]
658  m = re.search(r'(\w+)Mapper', className)
659  if m is None:
660  m = re.search(r"class '[\w.]*?(\w+)'", className)
661  name = m.group(1)
662  return name[:1].lower() + name[1:] if name else ''
663 
664  @classmethod
665  def getPackageName(cls):
666  """Return the name of the package containing this CameraMapper."""
667  if cls.packageName is None:
668  raise ValueError('class variable packageName must not be None')
669  return cls.packageName
670 
671  @classmethod
672  def getPackageDir(cls):
673  """Return the base directory of this package"""
674  return getPackageDir(cls.getPackageName())
675 
676  def map_camera(self, dataId, write=False):
677  """Map a camera dataset."""
678  if self.camera is None:
679  raise RuntimeError("No camera dataset available.")
680  actualId = self._transformId(dataId)
681  return dafPersist.ButlerLocation(
682  pythonType="lsst.afw.cameraGeom.CameraConfig",
683  cppType="Config",
684  storageName="ConfigStorage",
685  locationList=self.cameraDataLocation or "ignored",
686  dataId=actualId,
687  mapper=self,
688  storage=self.rootStorage
689  )
690 
691  def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
692  """Return the (preloaded) camera object.
693  """
694  if self.camera is None:
695  raise RuntimeError("No camera dataset available.")
696  return self.camera
697 
698  def map_defects(self, dataId, write=False):
699  """Map defects dataset.
700 
701  Returns
702  -------
703  `lsst.daf.butler.ButlerLocation`
704  Minimal ButlerLocation containing just the locationList field
705  (just enough information that bypass_defects can use it).
706  """
707  defectFitsPath = self._defectLookup(dataId=dataId)
708  if defectFitsPath is None:
709  raise RuntimeError("No defects available for dataId=%s" % (dataId,))
710 
711  return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
712  dataId, self,
713  storage=self.rootStorage)
714 
715  def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
716  """Return a defect based on the butler location returned by map_defects
717 
718  Parameters
719  ----------
720  butlerLocation : `lsst.daf.persistence.ButlerLocation`
721  locationList = path to defects FITS file
722  dataId : `dict`
723  Butler data ID; "ccd" must be set.
724 
725  Note: the name "bypass_XXX" means the butler makes no attempt to
726  convert the ButlerLocation into an object, which is what we want for
727  now, since that conversion is a bit tricky.
728  """
729  detectorName = self._extractDetectorName(dataId)
730  defectsFitsPath = butlerLocation.locationList[0]
731  with fits.open(defectsFitsPath) as hduList:
732  for hdu in hduList[1:]:
733  if hdu.header["name"] != detectorName:
734  continue
735 
736  defectList = []
737  for data in hdu.data:
738  bbox = afwGeom.Box2I(
739  afwGeom.Point2I(int(data['x0']), int(data['y0'])),
740  afwGeom.Extent2I(int(data['width']), int(data['height'])),
741  )
742  defectList.append(afwImage.DefectBase(bbox))
743  return defectList
744 
745  raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
746 
747  def map_expIdInfo(self, dataId, write=False):
748  return dafPersist.ButlerLocation(
749  pythonType="lsst.obs.base.ExposureIdInfo",
750  cppType=None,
751  storageName="Internal",
752  locationList="ignored",
753  dataId=dataId,
754  mapper=self,
755  storage=self.rootStorage
756  )
757 
758  def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
759  """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
760  expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
761  expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
762  return ExposureIdInfo(expId=expId, expBits=expBits)
763 
764  def std_bfKernel(self, item, dataId):
765  """Disable standardization for bfKernel
766 
767  bfKernel is a calibration product that is numpy array,
768  unlike other calibration products that are all images;
769  all calibration images are sent through _standardizeExposure
770  due to CalibrationMapping, but we don't want that to happen to bfKernel
771  """
772  return item
773 
774  def std_raw(self, item, dataId):
775  """Standardize a raw dataset by converting it to an Exposure instead
776  of an Image"""
777  return self._standardizeExposure(self.exposures['raw'], item, dataId,
778  trimmed=False, setVisitInfo=True)
779 
780  def map_skypolicy(self, dataId):
781  """Map a sky policy."""
782  return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
783  "Internal", None, None, self,
784  storage=self.rootStorage)
785 
786  def std_skypolicy(self, item, dataId):
787  """Standardize a sky policy by returning the one we use."""
788  return self.skypolicy
789 
790 
795 
796  def _getCcdKeyVal(self, dataId):
797  """Return CCD key and value used to look a defect in the defect
798  registry
799 
800  The default implementation simply returns ("ccd", full detector name)
801  """
802  return ("ccd", self._extractDetectorName(dataId))
803 
    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description: `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        # NOTE(review): ``searchParents`` is accepted and documented but not
        # referenced anywhere in this body -- confirm whether it is vestigial.
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                # NOTE(review): reconstructed nesting -- as written this branch
                # fires when the policy path *does* exist in storage, yet it
                # warns and discards the path (the filename search below then
                # usually recovers a standard "<name>.sqlite3").  Confirm the
                # intended pairing of this ``else`` against the original file.
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
        # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            # Not all Storage classes expose ``root``; non-posix storages skip
            # the Old-Butler compatibility stripping.
            pass

        # determine if there is an sqlite registry and if not, try the posix registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            # Re-resolve the path in storage (it may live in a parent repo).
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            # No SQL registry found; fall back to scanning the filesystem.
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry
911 
912  def _transformId(self, dataId):
913  """Generate a standard ID dict from a camera-specific ID dict.
914 
915  Canonical keys include:
916  - amp: amplifier name
917  - ccd: CCD name (in LSST this is a combination of raft and sensor)
918  The default implementation returns a copy of its input.
919 
920  Parameters
921  ----------
922  dataId : `dict`
923  Dataset identifier; this must not be modified
924 
925  Returns
926  -------
927  `dict`
928  Transformed dataset identifier.
929  """
930 
931  return dataId.copy()
932 
933  def _mapActualToPath(self, template, actualId):
934  """Convert a template path to an actual path, using the actual data
935  identifier. This implementation is usually sufficient but can be
936  overridden by the subclass.
937 
938  Parameters
939  ----------
940  template : `str`
941  Template path
942  actualId : `dict`
943  Dataset identifier
944 
945  Returns
946  -------
947  `str`
948  Pathname
949  """
950 
951  try:
952  transformedId = self._transformId(actualId)
953  return template % transformedId
954  except Exception as e:
955  raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
956 
957  @staticmethod
958  def getShortCcdName(ccdName):
959  """Convert a CCD name to a form useful as a filename
960 
961  The default implementation converts spaces to underscores.
962  """
963  return ccdName.replace(" ", "_")
964 
    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name

        Raises
        ------
        NotImplementedError
            Always, in this base class; concrete mappers must override.
        """
        raise NotImplementedError("No _extractDetectorName() function specified")
981 
982  def _extractAmpId(self, dataId):
983  """Extract the amplifier identifer from a dataset identifier.
984 
985  .. note:: Deprecated in 11_0
986 
987  amplifier identifier has two parts: the detector name for the CCD
988  containing the amplifier and index of the amplifier in the detector.
989 
990  Parameters
991  ----------
992  dataId : `dict`
993  Dataset identifer
994 
995  Returns
996  -------
997  `tuple`
998  Amplifier identifier
999  """
1000 
1001  trDataId = self._transformId(dataId)
1002  return (trDataId["ccd"], int(trDataId['amp']))
1003 
1004  def _setAmpDetector(self, item, dataId, trimmed=True):
1005  """Set the detector object in an Exposure for an amplifier.
1006 
1007  Defects are also added to the Exposure based on the detector object.
1008 
1009  Parameters
1010  ----------
1011  item : `lsst.afw.image.Exposure`
1012  Exposure to set the detector in.
1013  dataId : `dict`
1014  Dataset identifier
1015  trimmed : `bool`
1016  Should detector be marked as trimmed? (ignored)
1017  """
1018 
1019  return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1020 
1021  def _setCcdDetector(self, item, dataId, trimmed=True):
1022  """Set the detector object in an Exposure for a CCD.
1023 
1024  Parameters
1025  ----------
1026  item : `lsst.afw.image.Exposure`
1027  Exposure to set the detector in.
1028  dataId : `dict`
1029  Dataset identifier
1030  trimmed : `bool`
1031  Should detector be marked as trimmed? (ignored)
1032  """
1033  if item.getDetector() is not None:
1034  return
1035 
1036  detectorName = self._extractDetectorName(dataId)
1037  detector = self.camera[detectorName]
1038  item.setDetector(detector)
1039 
1040  def _setFilter(self, mapping, item, dataId):
1041  """Set the filter object in an Exposure. If the Exposure had a FILTER
1042  keyword, this was already processed during load. But if it didn't,
1043  use the filter from the registry.
1044 
1045  Parameters
1046  ----------
1047  mapping : `lsst.obs.base.Mapping`
1048  Where to get the filter from.
1049  item : `lsst.afw.image.Exposure`
1050  Exposure to set the filter in.
1051  dataId : `dict`
1052  Dataset identifier.
1053  """
1054 
1055  if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
1056  isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
1057  return
1058 
1059  if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1060  return
1061 
1062  actualId = mapping.need(['filter'], dataId)
1063  filterName = actualId['filter']
1064  if self.filters is not None and filterName in self.filters:
1065  filterName = self.filters[filterName]
1066  item.setFilter(afwImage.Filter(filterName))
1067 
1068  # Default standardization function for exposures
1069  def _standardizeExposure(self, mapping, item, dataId, filter=True,
1070  trimmed=True, setVisitInfo=True):
1071  """Default standardization function for images.
1072 
1073  This sets the Detector from the camera geometry
1074  and optionally set the Fiter. In both cases this saves
1075  having to persist some data in each exposure (or image).
1076 
1077  Parameters
1078  ----------
1079  mapping : `lsst.obs.base.Mapping`
1080  Where to get the values from.
1081  item : image-like object
1082  Can be any of lsst.afw.image.Exposure,
1083  lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1084  or lsst.afw.image.MaskedImage
1085 
1086  dataId : `dict`
1087  Dataset identifier
1088  filter : `bool`
1089  Set filter? Ignored if item is already an exposure
1090  trimmed : `bool`
1091  Should detector be marked as trimmed?
1092  setVisitInfo : `bool`
1093  Should Exposure have its VisitInfo filled out from the metadata?
1094 
1095  Returns
1096  -------
1097  `lsst.afw.image.Exposure`
1098  The standardized Exposure.
1099  """
1100  try:
1101  item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
1102  except Exception as e:
1103  self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
1104  raise
1105 
1106  if mapping.level.lower() == "amp":
1107  self._setAmpDetector(item, dataId, trimmed)
1108  elif mapping.level.lower() == "ccd":
1109  self._setCcdDetector(item, dataId, trimmed)
1110 
1111  if filter:
1112  self._setFilter(mapping, item, dataId)
1113 
1114  return item
1115 
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Path to the defects file or None if not available.

        Raises
        ------
        RuntimeError
            If there is no exposure registry, or if the defect query matches
            more than one file.
        """
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template to provide.
        # NOTE(review): ('taiObs') and ('raw_visit') are parenthesized strings,
        # not 1-tuples (no trailing comma) -- confirm Registry.lookup accepts
        # bare strings here.
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        # Exactly one raw_visit row is expected for a given visit.
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
1156 
1157  def _makeCamera(self, policy, repositoryDir):
1158  """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
1159  the camera geometry
1160 
1161  Also set self.cameraDataLocation, if relevant (else it can be left
1162  None).
1163 
1164  This implementation assumes that policy contains an entry "camera"
1165  that points to the subdirectory in this package of camera data;
1166  specifically, that subdirectory must contain:
1167  - a file named `camera.py` that contains persisted camera config
1168  - ampInfo table FITS files, as required by
1169  lsst.afw.cameraGeom.makeCameraFromPath
1170 
1171  Parameters
1172  ----------
1173  policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy`
1174  Policy with per-camera defaults already merged
1175  (PexPolicy only for backward compatibility).
1176  repositoryDir : `str`
1177  Policy repository for the subclassing module (obtained with
1178  getRepositoryPath() on the per-camera default dictionary).
1179  """
1180  if isinstance(policy, pexPolicy.Policy):
1181  policy = dafPersist.Policy(pexPolicy=policy)
1182  if 'camera' not in policy:
1183  raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1184  cameraDataSubdir = policy['camera']
1185  self.cameraDataLocation = os.path.normpath(
1186  os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1187  cameraConfig = afwCameraGeom.CameraConfig()
1188  cameraConfig.load(self.cameraDataLocation)
1189  ampInfoPath = os.path.dirname(self.cameraDataLocation)
1190  return afwCameraGeom.makeCameraFromPath(
1191  cameraConfig=cameraConfig,
1192  ampInfoPath=ampInfoPath,
1193  shortNameFunc=self.getShortCcdName,
1194  pupilFactoryClass=self.PupilFactoryClass
1195  )
1196 
    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry
1206 
1207  def getImageCompressionSettings(self, datasetType, dataId):
1208  """Stuff image compression settings into a daf.base.PropertySet
1209 
1210  This goes into the ButlerLocation's "additionalData", which gets
1211  passed into the boost::persistence framework.
1212 
1213  Parameters
1214  ----------
1215  datasetType : `str`
1216  Type of dataset for which to get the image compression settings.
1217  dataId : `dict`
1218  Dataset identifier.
1219 
1220  Returns
1221  -------
1222  additionalData : `lsst.daf.base.PropertySet`
1223  Image compression settings.
1224  """
1225  mapping = self.mappings[datasetType]
1226  recipeName = mapping.recipe
1227  storageType = mapping.storage
1228  if storageType not in self._writeRecipes:
1229  return dafBase.PropertySet()
1230  if recipeName not in self._writeRecipes[storageType]:
1231  raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1232  (datasetType, storageType, recipeName))
1233  recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1234  seed = hash(tuple(dataId.items())) % 2**31
1235  for plane in ("image", "mask", "variance"):
1236  if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1237  recipe.set(plane + ".scaling.seed", seed)
1238  return recipe
1239 
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which uses it to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                # NOTE(review): this compares section names of recipes[entry]
                # against the *top-level* names of supplements (not
                # supplements[entry].names()) -- confirm the intended
                # comparison; as written it may not detect overrides.
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            # Every storage type must supply a "default" recipe.
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1315 
1316 
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier; passed to the mapper when computing the exposure
        ID for the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to compute the exposure ID and build the VisitInfo;
        without it the VisitInfo cannot be set.
    logger : `lsst.log.Log`, optional
        Logger for diagnostics; a default "CameraMapper" logger is created
        when needed.
    setVisitInfo : `bool`, optional
        If True, attempt to attach a VisitInfo built from the metadata.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            # strip=True removes the WCS keywords from metadata as they are
            # consumed, so they are not duplicated on the Exposure.
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
                         " %s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Exposure
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1375 
1376 
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas list the expected keys and their defaults; each default's type
    # is also the expected type for that key.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Raise RuntimeError if entry contains keywords not in allowed."""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for recipeName in recipes.names(True):
        checkUnrecognized(recipes[recipeName], ["image", "mask", "variance"], recipeName)
        propSet = dafBase.PropertySet()
        validated[recipeName] = propSet
        for plane in ("image", "mask", "variance"):
            planeRecipe = recipes[recipeName][plane]
            checkUnrecognized(planeRecipe, ["compression", "scaling"],
                              recipeName + "->" + plane)

            for section, schema in (("compression", compressionSchema),
                                    ("scaling", scalingSchema)):
                prefix = plane + "." + section
                if section not in planeRecipe:
                    # Section absent: fill entirely from the defaults.
                    for key in schema:
                        propSet.set(prefix + "." + key, schema[key])
                    continue
                given = planeRecipe[section]
                checkUnrecognized(given, schema.keys(), recipeName + "->" + plane + "->" + section)
                for key, default in schema.items():
                    if key in given:
                        # Coerce to the schema's type (e.g. YAML int -> float).
                        value = type(default)(given[key])
                    else:
                        value = default
                    propSet.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _getCcdKeyVal(self, dataId)
Utility functions.