# cameraMapper.py (lsst.obs.base 17.0.1-6-g7bb9714)
#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#

import copy
import os
import re
import weakref

from astropy.io import fits  # required by bypass_defects until defects are written as AFW tables

import lsst.daf.persistence as dafPersist
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from lsst.utils import getPackageDir

from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file, a filter description (Filter class
    static configuration) as another policy file, and an optional defects
    description directory.

    Information from the camera geometry and defects are inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making it suitable for path expansion
    (e.g. removing commas). The default implementation does nothing. Note
    that this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _getCcdKeyVal(self, dataId): return a CCD key and value
    by which to look up defects in the defects registry.
    The default implementation returns ("ccd", detector name).

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    TODO:

    - Handle defects the same way as all other calibration products, using
      the calibration registry
    - Instead of auto-loading the camera at construction time, load it from
      the calibration registry
    - Rewrite defects as AFW tables so we don't need astropy.io.fits to
      unpersist them; then remove all mention of astropy.io.fits from this
      package.
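
    Examples
    --------
    A minimal subclass sketch (``MyCamMapper``, ``obs_mycam`` and the
    detector naming scheme are hypothetical, not part of obs_base):

        class MyCamMapper(CameraMapper):
            packageName = "obs_mycam"

            def _extractDetectorName(self, dataId):
                return "ccd%02d" % (dataId["ccd"],)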
    """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. And otherwise, the calibs are in the
        # regular root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=provided)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and
        # the registry is loaded from the camera package, which is on the
        # local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings.

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
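
        Examples
        --------
        A sketch of the conveniences generated here, assuming the policy
        defines a ``raw`` dataset (the dataId keys are illustrative):

            loc = mapper.map_raw({"visit": 1, "ccd": 0})    # ButlerLocation
            mapper.query_raw(("visit", "ccd"), {"ccd": 0})  # registry lookup
            mapper.map_raw_filename({"visit": 1, "ccd": 0})  # derived dataset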
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))

                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
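
                        # Usage sketch (dataset name illustrative): for a
                        # "raw" dataset, butler.get("raw_sub", visit=1, ccd=0,
                        # bbox=someBox) reads only the region given by the
                        # llcX/llcY/width/height hints set above.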

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).getScalar("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override this method.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

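    # A subclass sketch (the packing scheme is purely illustrative; a real
    # mapper reserves enough bits for each field):
    #
    #     def _computeCcdExposureId(self, dataId):
    #         return dataId["visit"] * 100 + dataId["ccd"]
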
    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override this method.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained an
            HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(aList):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return aList[0] if aList is not None and len(aList) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera
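
    # Usage sketch: butler.get("camera") returns the Camera preloaded in
    # __init__ via this bypass; the dataId plays no part in the selection.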

    def map_defects(self, dataId, write=False):
        """Map defects dataset.

        Returns
        -------
        `lsst.daf.persistence.ButlerLocation`
            Minimal ButlerLocation containing just the locationList field
            (just enough information that bypass_defects can use it).
        """
        defectFitsPath = self._defectLookup(dataId=dataId)
        if defectFitsPath is None:
            raise RuntimeError("No defects available for dataId=%s" % (dataId,))

        return dafPersist.ButlerLocation(None, None, None, defectFitsPath,
                                         dataId, self,
                                         storage=self.rootStorage)

    def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId):
        """Return a defect based on the butler location returned by
        map_defects.

        Parameters
        ----------
        butlerLocation : `lsst.daf.persistence.ButlerLocation`
            locationList = path to defects FITS file
        dataId : `dict`
            Butler data ID; "ccd" must be set.

        Note: the name "bypass_XXX" means the butler makes no attempt to
        convert the ButlerLocation into an object, which is what we want for
        now, since that conversion is a bit tricky.
        """
        detectorName = self._extractDetectorName(dataId)
        defectsFitsPath = butlerLocation.locationList[0]

        with fits.open(defectsFitsPath) as hduList:
            for hdu in hduList[1:]:
                if hdu.header["name"] != detectorName:
                    continue

                defectList = []
                for data in hdu.data:
                    bbox = afwGeom.Box2I(
                        afwGeom.Point2I(int(data['x0']), int(data['y0'])),
                        afwGeom.Extent2I(int(data['width']), int(data['height'])),
                    )
                    defectList.append(afwImage.DefectBase(bbox))
                return defectList

        raise RuntimeError("No defects for ccd %s in %s" % (detectorName, defectsFitsPath))

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)
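
    # Usage sketch (dataId keys illustrative): the returned object is handy
    # for building source IDs, e.g.
    #     info = butler.get("expIdInfo", visit=1, ccd=0)
    #     idFactory = afwTable.IdFactory.makeSource(info.expId, info.unusedBits)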

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel.

        bfKernel is a calibration product that is a numpy array, unlike other
        calibration products, which are all images; all calibration images
        are sent through _standardizeExposure due to CalibrationMapping, but
        we don't want that to happen to bfKernel.
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

    ###########################################################################
    #
    # Utility functions
    #
    ###########################################################################

    def _getCcdKeyVal(self, dataId):
        """Return the CCD key and value used to look up a defect in the
        defect registry.

        The default implementation simply returns ("ccd", full detector name)
        """
        return ("ccd", self._extractDetectorName(dataId))

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # The old Butler API indicated the registry path including the repo
        # folder; the new Butler expects the registry to be in the repo
        # folder. To support the old API, check to see if path starts with
        # root, and if so, strip root from path. Currently only works with
        # PosixStorage.
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
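
        Examples
        --------
        A subclass sketch that renames a colloquial key (key names follow the
        example in the class docstring):

            def _transformId(self, dataId):
                actualId = dataId.copy()
                if "ccdname" in actualId:
                    actualId["ccd"] = actualId.pop("ccdname")
                return actualId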
        """

        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
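
        Examples
        --------
        A sketch with a hypothetical template (not a policy default):

            mapper._mapActualToPath("raw/v%(visit)d_f%(filter)s.fits",
                                    {"visit": 1, "filter": "g"})
            # -> 'raw/v1_fg.fits'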
        """

        try:
            transformedId = self._transformId(actualId)
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _extractAmpId(self, dataId):
        """Extract the amplifier identifier from a dataset identifier.

        .. note:: Deprecated in 11_0

        The amplifier identifier has two parts: the detector name for the CCD
        containing the amplifier and the index of the amplifier in the
        detector.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier

        Returns
        -------
        `tuple`
            Amplifier identifier
        """

        trDataId = self._transformId(dataId)
        return (trDataId["ccd"], int(trDataId['amp']))

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """

        if not isinstance(item, (afwImage.ExposureU, afwImage.ExposureI,
                                 afwImage.ExposureF, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        item.setFilter(afwImage.Filter(filterName))

    # Default standardization function for exposures
    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry and optionally sets
        the Filter. In both cases this saves having to persist some data in
        each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage

        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s", item, e)
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(item, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(item, dataId, trimmed)

        if filter:
            self._setFilter(mapping, item, dataId)

        return item

    def _defectLookup(self, dataId, dateKey='taiObs'):
        """Find the defects for a given CCD.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier
        dateKey : `str`, optional
            Name of the registry column holding the observation time.

        Returns
        -------
        `str`
            Path to the defects file or None if not available.
        """
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template
        # to provide.
        rows = self.registry.lookup((dateKey), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        assert len(rows) == 1
        dayObs = rows[0][0]

        # Lookup the defects for this CCD serial number that are valid at the
        # exposure midpoint.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, dayObs))
        if not rows:
            return None
        if len(rows) == 1:
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, dayObs, len(rows), ", ".join([_[0] for _ in rows])))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
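
        Examples
        --------
        A sketch (dataset type and dataId are illustrative); the recipe name
        comes from the mapping's ``recipe`` entry:

            settings = mapper.getImageCompressionSettings(
                "calexp", {"visit": 1, "ccd": 0})
            settings.getScalar("image.compression.algorithm")  # e.g. 'NONE'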
        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files.

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently only the
        ``FitsStorage`` storage type uses recipes, to configure FITS image
        compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier; used (with ``mapper``) to compute the exposure ID
        for the VisitInfo.
    mapper : `lsst.obs.base.CameraMapper`, optional
        Mapper used to create the VisitInfo from the metadata.
    logger : `lsst.log.Log`, optional
        Logger for warnings; one is created if needed and not provided.
    setVisitInfo : `bool`
        If True, attempt to set the VisitInfo from the metadata.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
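
    Examples
    --------
    A sketch (the file name is hypothetical); the DecoratedImage branch also
    copies the WCS and metadata into the new Exposure:

        di = afwImage.DecoratedImageF("raw.fits")
        exp = exposureFromImage(di, setVisitInfo=False)
        assert isinstance(exp, afwImage.ExposureF)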
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
                         " %s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Exposure
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated