# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import weakref

from deprecated.sphinx import deprecated

from astro_metadata_translator import fix_header
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from lsst.utils import getPackageDir

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making it suitable for path expansion
    (e.g. removing commas). The default implementation does nothing. Note
    that this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

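    # A minimal subclass sketch (illustrative only; "MyCam", its package name,
    # and its dataId scheme are hypothetical, not part of obs_base):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #
    #         def _extractDetectorName(self, dataId):
    #             return "ccd%02d" % dataId["ccd"]
    #
    #         def _computeCcdExposureId(self, dataId):
    #             return dataId["visit"] * 100 + dataId["ccd"]
    #
    #         def _computeCoaddExposureId(self, dataId, singleFilter):
    #             raise NotImplementedError("no coadds for MyCam")
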
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. Otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
            if calibStorage is None:
                calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling
                        # for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

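    # Example (illustrative): for a dataset type "calexp" stored as
    # FitsStorage, the machinery above generates derived accessors such as
    # "calexp_md", "calexp_wcs", "calexp_filename" and "calexp_sub", usable
    # directly through a Butler (the repo path and dataId are hypothetical):
    #
    #     butler = dafPersist.Butler(root="/path/to/repo")
    #     md = butler.get("calexp_md", visit=123, ccd=42)    # header only
    #     wcs = butler.get("calexp_wcs", visit=123, ccd=42)  # WCS component
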
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(aList):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return aList[0] if aList is not None and len(aList) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

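    # Example (illustrative): for a hypothetical subclass named "HscMapper",
    # getCameraName() strips the "Mapper" suffix and lower-cases the first
    # letter, returning "hsc".
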
    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array, unlike
        other calibration products that are all images; all calibration
        images are sent through _standardizeExposure due to
        CalibrationMapping, but we don't want that to happen to bfKernel.
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : Policy
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # the old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()

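    # A possible subclass override (illustrative sketch; the colloquial
    # "ccdname" key is hypothetical):
    #
    #     def _transformId(self, dataId):
    #         actualId = dataId.copy()
    #         if "ccdname" in actualId:
    #             actualId["ccd"] = actualId.pop("ccdname")
    #         return actualId
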
    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
        """

        try:
            transformedId = self._transformId(actualId)
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, actualId, e))

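    # Example (illustrative; the template and keys are hypothetical):
    #
    #     mapper._mapActualToPath("raw/v%(visit)d/%(ccd)s.fits",
    #                             {"visit": 123, "ccd": "1_2"})
    #     # -> 'raw/v123/1_2.fits'
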
    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")

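    # Example: CameraMapper.getShortCcdName("R:1,2 S:3,4") -> 'R:1,2_S:3,4'
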
    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by
        lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    @deprecated("This method is no longer used for ISR (will be removed after v11)", category=FutureWarning)
    def _extractAmpId(self, dataId):
        """Extract the amplifier identifier from a dataset identifier.

        .. note:: Deprecated in 11_0

        The amplifier identifier has two parts: the detector name for the CCD
        containing the amplifier and the index of the amplifier in the
        detector.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier

        Returns
        -------
        `tuple`
            Amplifier identifier
        """

        trDataId = self._transformId(dataId)
        return (trDataId["ccd"], int(trDataId['amp']))

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        try:
            item.setFilter(afwImage.Filter(filterName))
        except pexExcept.NotFoundError:
            self.log.warn("Filter %s not defined. Set to UNKNOWN.", filterName)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage

        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s", item, e)
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe

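    # Example (illustrative): if the recipe for "calexp" leaves
    # image.scaling.seed at 0, the returned settings carry a deterministic
    # per-dataId seed instead, so repeated writes of the same dataId fuzz
    # identically:
    #
    #     settings = mapper.getImageCompressionSettings("calexp", {"visit": 1})
    #     settings.getScalar("image.scaling.seed")  # repeatable per dataId
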
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, and it uses them to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier, used to compute the exposure ID when setting the
        VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to compute the exposure ID and construct the VisitInfo;
        without it the VisitInfo cannot be set.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default one is created if needed.
    setVisitInfo : `bool`, optional
        If True, attempt to fill out the VisitInfo from the metadata.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure


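# Example (illustrative; the filename is hypothetical):
#
#     di = afwImage.DecoratedImageF("raw-0001.fits")
#     exp = exposureFromImage(di, setVisitInfo=False)
#     exp.getMetadata()  # carried over from the DecoratedImage

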
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define the allowed entries and their default values (the
    # default value also determines the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated

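# Example (illustrative sketch; it assumes `dafPersist.Policy` accepts a
# `dict` initializer, and mirrors the YAML recipe shown in
# `_initWriteRecipes`):
#
#     recipes = dafPersist.Policy({"default": {
#         "image": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
#         "mask": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
#         "variance": {"compression": {"algorithm": "GZIP_SHUFFLE"}},
#     }})
#     validated = validateRecipeFitsStorage(recipes)
#     # Missing keys (rows, columns, quantizeLevel, and all "scaling"
#     # entries) are filled in from the schema defaults above.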