# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import weakref

from astro_metadata_translator import fix_header
from lsst.utils import doImport
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from lsst.utils import getPackageDir
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects are inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion (e.g. removing
    commas). The default implementation does nothing. Note that this
    method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    # Gen3 instrument corresponding to this mapper
    # Can be a class or a string with the full name of the class
    _gen3instrument = None
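
    # Illustrative sketch (not part of the original module): a concrete mapper
    # subclass typically sets the class attributes above and implements the
    # hooks named in the class docstring; the names below are hypothetical
    # examples, not an existing obs package:
    #
    #     class MyCameraMapper(CameraMapper):
    #         packageName = "obs_mycamera"
    #         MakeRawVisitInfoClass = MakeMyCameraRawVisitInfo
    #
    #         def _extractDetectorName(self, dataId):
    #             return "R:%(raft)s S:%(sensor)s" % dataId
    #
    # Additional per-dataset handling can then be supplied as
    # map_<datasetType>, std_<datasetType> and bypass_<datasetType> methods.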

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged.
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data.
        provided : `list` of `str`
            Keys provided by the mapper.
        """
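        # Illustrative sketch (not part of the original module): for a dataset
        # type such as "calexp" defined in the policy, the derived convenience
        # datasets set up below would typically be accessed from a gen2 Butler
        # like this (dataset type and data-identifier keys are hypothetical):
        #
        #     md = butler.get("calexp_md", visit=1234, ccd=56)        # header only
        #     wcs = butler.get("calexp_wcs", visit=1234, ccd=56)      # just the SkyWcs
        #     path = butler.get("calexp_filename", visit=1234, ccd=56)[0]
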
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                      provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()
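
    # Illustrative sketch (not part of the original module): a subclass would
    # typically pack the visit and detector numbers into the 64-bit ID, for
    # example (the field width below is a hypothetical choice):
    #
    #     def _computeCcdExposureId(self, dataId):
    #         visit = int(dataId["visit"])
    #         ccd = int(dataId["ccd"])
    #         return visit*200 + ccd  # assumes fewer than 200 detectors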

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case ``dataId`` must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if the
            object can't be found. If the input argument path contained an HDU
            indicator, the returned path will also contain the HDU indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and will
        not be removed if they are found elsewhere in the _parent chain. This
        means that the same file will be stored twice if the previous version
        was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be handled
        # by traversing the container of repositories in Butler.

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
        """
        if cls._gen3instrument is None:
            raise NotImplementedError("Please provide a specific implementation for your instrument"
                                      " to enable conversion of this gen2 repository to gen3")
        if isinstance(cls._gen3instrument, str):
            # Given a string to convert to an instrument class
            cls._gen3instrument = doImport(cls._gen3instrument)
        if not issubclass(cls._gen3instrument, Instrument):
            raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
                             " but that is not an lsst.obs.base.Instrument")
        return cls._gen3instrument
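
    # Illustrative sketch (not part of the original module): a subclass would
    # declare its gen3 counterpart either as a class object or as a
    # fully-qualified name, e.g. (the module path below is hypothetical):
    #
    #     _gen3instrument = "lsst.obs.mycamera.MyCamera"
    #
    # getGen3Instrument() then imports and validates it on first use.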

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to bfKernel
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the registry to
        # be in the repo folder. To support Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
        """

        try:
            transformedId = self._transformId(actualId)
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
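
    # Illustrative sketch (not part of the original module): a policy template
    # is expanded with "%" formatting against the transformed data identifier,
    # e.g. (template and keys below are hypothetical):
    #
    #     "raw/%(visit)07d/%(ccd)s.fits" % {"visit": 903334, "ccd": "1_53"}
    #     # -> "raw/0903334/1_53.fits"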

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        try:
            item.setFilter(afwImage.Filter(filterName))
        except pexExcept.NotFoundError:
            self.log.warn("Filter %s not defined. Set to UNKNOWN." % (filterName))

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage

        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to configure
        FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier, used when computing the exposure ID for the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to compute the exposure ID and build the VisitInfo.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default logger is created if not provided.
    setVisitInfo : `bool`, optional
        If True, attempt to fill out the Exposure's VisitInfo from the metadata.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
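
# Illustrative sketch (not part of the original module): exposureFromImage is
# normally called via CameraMapper._standardizeExposure, but it can also be
# used directly, e.g. (variable names below are hypothetical):
#
#     decoratedImage = afwImage.DecoratedImageF("raw.fits")
#     exposure = exposureFromImage(decoratedImage, dataId, mapper=mapper,
#                                  logger=mapper.log, setVisitInfo=True)
#
# For a DecoratedImage the FITS metadata is fixed up with fix_header and
# attached to the returned Exposure before the VisitInfo is set.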


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the default
    # value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated