Coverage for python/lsst/obs/base/cameraMapper.py : 9%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import copy
import os
import re
import traceback
import weakref

from astro_metadata_translator import fix_header
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from lsst.utils import getPackageDir

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``, failing
    that the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making suitable for path expansion (e.g. removing
    commas). The default implementation does nothing. Note that this
    method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """

    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
            if calibStorage is None:
                calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator
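
    # Construction sketch (illustrative): a concrete mapper is normally not
    # instantiated directly; the Gen2 Butler creates it from a repository root
    # (the path and dataId below are hypothetical):
    #
    #     import lsst.daf.persistence as dafPersist
    #     butler = dafPersist.Butler("/path/to/repo")
    #     raw = butler.get("raw", visit=12345, ccd=42)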

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged.
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data.
        provided : `list` of `str`
            Keys provided by the mapper.
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
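
    # The closures registered above expose derived dataset types through the
    # butler. For example (illustrative sketch; the dataset types and dataId
    # keys depend on the concrete obs package):
    #
    #     md = butler.get("calexp_md", visit=12345, ccd=42)        # header only
    #     wcs = butler.get("calexp_wcs", visit=12345, ccd=42)      # just the SkyWcs
    #     paths = butler.get("raw_filename", visit=12345, ccd=42)  # resolved path(s)
    #     nsrc = butler.get("src_len", visit=12345, ccd=42)        # catalog length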

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()
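
    # Sketch (illustrative): a concrete mapper typically packs tract, patch
    # and (for single-filter coadds) the filter into one integer. The field
    # widths and ``filterIdMap`` below are hypothetical:
    #
    #     def _computeCoaddExposureId(self, dataId, singleFilter):
    #         tract = int(dataId["tract"])
    #         patchX, patchY = (int(c) for c in dataId["patch"].split(","))
    #         oid = (tract * 2**13 + patchX) * 2**13 + patchY
    #         if singleFilter:
    #             oid = oid * 8 + self.filterIdMap[dataId["filter"]]
    #         return oid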

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained an
            HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier.
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''
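
    # For example (illustrative; the class lives in its own obs package), a
    # subclass named ``HscMapper`` yields:
    #
    #     >>> HscMapper.getCameraName()
    #     'hsc'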

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array, unlike other
        calibration products that are all images; all calibration images are
        sent through _standardizeExposure due to CalibrationMapping, but we
        don't want that to happen to bfKernel.
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages).
        path : string
            Path for registry.
        policy : Policy
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder, New Butler expects the
        # registry to be in the repo folder. To support Old API, check to see if path starts with root,
        # and if so, strip root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """
        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
        """
        try:
            transformedId = self._transformId(actualId)
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
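
    # Example (illustrative): with template "raw/v%(visit)07d/R%(raft)s.fits"
    # and actualId {"visit": 903334, "raft": "2,2"}, this returns
    # "raw/v0903334/R2,2.fits" (after _transformId has been applied).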

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")
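
    # For example, getShortCcdName("R:1,2 S:3,4") returns "R:1,2_S:3,4";
    # subclasses may override this to shorten detector names further.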

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        try:
            item.setFilter(afwImage.Filter(filterName))
        except pexExcept.NotFoundError:
            self.log.warn("Filter %s not defined. Set to UNKNOWN." % (filterName))

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry and optionally sets
        the Filter. In both cases this saves having to persist some data in
        each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe
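
    # The returned PropertySet uses dotted names per image plane, e.g.
    # (illustrative; actual values depend on the configured recipe):
    #
    #     image.compression.algorithm = "GZIP_SHUFFLE"
    #     image.scaling.seed = 735041794   # derived from the dataId when the recipe's seed is 0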

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, and it uses them to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
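

# Usage sketch (illustrative): wrapping a bare image read from disk so that
# downstream code can rely on the Exposure interface. Without a mapper the
# VisitInfo cannot be derived, so it is skipped here; the file name is
# hypothetical.
#
#     decoratedImage = afwImage.DecoratedImageF("raw.fits")
#     exposure = exposureFromImage(decoratedImage, setVisitInfo=False)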


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
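

# Validation sketch (illustrative): given a Policy built from a YAML file like
# the one shown in CameraMapper._initWriteRecipes (the path below is
# hypothetical), any "compression"/"scaling" entry not present in a recipe is
# filled from the schema defaults above (e.g. rows=1, quantizeLevel=0.0):
#
#     recipes = dafPersist.Policy("writeRecipes.yaml")
#     validated = validateRecipeFitsStorage(recipes["FitsStorage"])
#     validated["default"].getScalar("image.compression.algorithm")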