Coverage for python/lsst/obs/base/cameraMapper.py : 9%

1# This file is part of obs_base.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import copy
23import os
24import re
25import traceback
26import warnings
27import weakref
29from astro_metadata_translator import fix_header
30from lsst.utils import doImport
31import lsst.daf.persistence as dafPersist
32from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
33import lsst.daf.base as dafBase
34import lsst.afw.geom as afwGeom
35import lsst.afw.image as afwImage
36import lsst.afw.table as afwTable
37from lsst.afw.fits import readMetadata
38import lsst.afw.cameraGeom as afwCameraGeom
39import lsst.log as lsstLog
40import lsst.pex.exceptions as pexExcept
41from .exposureIdInfo import ExposureIdInfo
42from .makeRawVisitInfo import MakeRawVisitInfo
43from .utils import createInitialSkyWcs, InitialSkyWcsError
44from lsst.utils import getPackageDir
45from ._instrument import Instrument
47__all__ = ["CameraMapper", "exposureFromImage"]
50class CameraMapper(dafPersist.Mapper):
52 """CameraMapper is a base class for mappers that handle images from a
53 camera and products derived from them. This provides an abstraction layer
54 between the data on disk and the code.
56 Public methods: keys, queryMetadata, getDatasetTypes, map,
57 canStandardize, standardize
59 Mappers for specific data sources (e.g., CFHT Megacam, LSST
60 simulations, etc.) should inherit this class.
62 The CameraMapper manages datasets within a "root" directory. Note that
63 writing to a dataset present in the input root will hide the existing
64 dataset but not overwrite it. See #2160 for design discussion.
66 A camera is assumed to consist of one or more rafts, each composed of
67 multiple CCDs. Each CCD is in turn composed of one or more amplifiers
68 (amps). A camera is also assumed to have a camera geometry description
69 (CameraGeom object) as a policy file and a filter description (Filter class
70 static configuration) as another policy file.
72 Information from the camera geometry and defects are inserted into all
73 Exposure objects returned.
75 The mapper uses one or two registries to retrieve metadata about the
76 images. The first is a registry of all raw exposures. This must contain
77 the time of the observation. One or more tables (or the equivalent)
78 within the registry are used to look up data identifier components that
79 are not specified by the user (e.g. filter) and to return results for
80 metadata queries. The second is an optional registry of all calibration
81 data. This should contain validity start and end entries for each
82 calibration dataset in the same timescale as the observation time.
84 Subclasses will typically set MakeRawVisitInfoClass and optionally the
85 metadata translator class:
87 MakeRawVisitInfoClass: a class variable that points to a subclass of
88 MakeRawVisitInfo, a functor that creates an
89 lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
91 translatorClass: The `~astro_metadata_translator.MetadataTranslator`
92 class to use for fixing metadata values. If it is not set, an attempt
93 will be made to infer the class from ``MakeRawVisitInfoClass``; failing
94 that, the metadata fixup will try to infer the translator class from the
95 header itself.
97 Subclasses must provide the following methods:
99 _extractDetectorName(self, dataId): returns the detector name for a CCD
100 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
101 a dataset identifier referring to that CCD or a subcomponent of it.
103 _computeCcdExposureId(self, dataId): see below
105 _computeCoaddExposureId(self, dataId, singleFilter): see below
107 Subclasses may also need to override the following methods:
109 _transformId(self, dataId): transformation of a data identifier
110 from colloquial usage (e.g., "ccdname") to proper/actual usage
111 (e.g., "ccd"), including making suitable for path expansion (e.g. removing
112 commas). The default implementation does nothing. Note that this
113 method should not modify its input parameter.
115 getShortCcdName(self, ccdName): a static method that returns a shortened
116 name suitable for use as a filename. The default version converts spaces
117 to underscores.
119 _mapActualToPath(self, template, actualId): convert a template path to an
120 actual path, using the actual dataset identifier.
122 The mapper's behaviors are largely specified by the policy file.
123 See the MapperDictionary.paf for descriptions of the available items.
125 The 'exposures', 'calibrations', and 'datasets' subpolicies configure
126 mappings (see Mappings class).
128 Common default mappings for all subclasses can be specified in the
129 "policy/{images,exposures,calibrations,datasets}.yaml" files. This
130 provides a simple way to add a product to all camera mappers.
132 Functions to map (provide a path to the data given a dataset
133 identifier dictionary) and standardize (convert data into some standard
134 format or type) may be provided in the subclass as "map_{dataset type}"
135 and "std_{dataset type}", respectively.
137 If non-Exposure datasets cannot be retrieved using standard
138 daf_persistence methods alone, a "bypass_{dataset type}" function may be
139 provided in the subclass to return the dataset instead of using the
140 "datasets" subpolicy.
142 Implementations of map_camera and bypass_camera that should typically be
143 sufficient are provided in this base class.
145 Notes
146 -----
147 .. todo::
149 Instead of auto-loading the camera at construction time, load it from
150 the calibration registry
152 Parameters
153 ----------
154 policy : daf_persistence.Policy
155 Policy with per-camera defaults already merged.
156 repositoryDir : string
157 Policy repository for the subclassing module (obtained with
158 getRepositoryPath() on the per-camera default dictionary).
159 root : string, optional
160 Path to the root directory for data.
161 registry : string, optional
162 Path to registry with data's metadata.
163 calibRoot : string, optional
164 Root directory for calibrations.
165 calibRegistry : string, optional
166 Path to registry with calibrations' metadata.
167 provided : list of string, optional
168 Keys provided by the mapper.
169 parentRegistry : Registry subclass, optional
170 Registry from a parent repository that may be used to look up
171 data's metadata.
172 repositoryCfg : daf_persistence.RepositoryCfg or None, optional
173 The configuration information for the repository this mapper is
174 being used with.
175 """
176 packageName = None
178 # a class or subclass of MakeRawVisitInfo, a functor that makes an
179 # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
180 MakeRawVisitInfoClass = MakeRawVisitInfo
182 # a class or subclass of PupilFactory
183 PupilFactoryClass = afwCameraGeom.PupilFactory
185 # Class to use for metadata translations
186 translatorClass = None
188 # Gen3 instrument corresponding to this mapper
189 # Can be a class or a string with the full name of the class
190 _gen3instrument = None
192 def __init__(self, policy, repositoryDir,
193 root=None, registry=None, calibRoot=None, calibRegistry=None,
194 provided=None, parentRegistry=None, repositoryCfg=None):
196 dafPersist.Mapper.__init__(self)
198 self.log = lsstLog.Log.getLogger("CameraMapper")
200 if root:
201 self.root = root
202 elif repositoryCfg:
203 self.root = repositoryCfg.root
204 else:
205 self.root = None
207 repoPolicy = repositoryCfg.policy if repositoryCfg else None
208 if repoPolicy is not None:
209 policy.update(repoPolicy)
211 # Levels
212 self.levels = dict()
213 if 'levels' in policy:
214 levelsPolicy = policy['levels']
215 for key in levelsPolicy.names(True):
216 self.levels[key] = set(levelsPolicy.asArray(key))
217 self.defaultLevel = policy['defaultLevel']
218 self.defaultSubLevels = dict()
219 if 'defaultSubLevels' in policy:
220 self.defaultSubLevels = policy['defaultSubLevels']
222 # Root directories
223 if root is None:
224 root = "."
225 root = dafPersist.LogicalLocation(root).locString()
227 self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
229 # If the calibRoot is passed in, use that. If not and it's indicated in
230 # the policy, use that. Otherwise, the calibs are in the regular
231 # root.
232 # If the location indicated by the calib root does not exist, do not
233 # create it.
234 calibStorage = None
235 if calibRoot is not None:
236 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
237 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
238 create=False)
239 else:
240 calibRoot = policy.get('calibRoot', None)
241 if calibRoot:
242 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
243 create=False)
244 if calibStorage is None:
245 calibStorage = self.rootStorage
247 self.root = root
249 # Registries
250 self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
251 self.rootStorage, searchParents=False,
252 posixIfNoSql=(not parentRegistry))
253 if not self.registry:
254 self.registry = parentRegistry
255 needCalibRegistry = policy.get('needCalibRegistry', None)
256 if needCalibRegistry:
257 if calibStorage:
258 self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
259 "calibRegistryPath", calibStorage,
260 posixIfNoSql=False) # NB never use posix for calibs
261 else:
262 raise RuntimeError(
263 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
264 f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
265 else:
266 self.calibRegistry = None
268 # Dict of valid keys and their value types
269 self.keyDict = dict()
271 self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
272 self._initWriteRecipes()
274 # Camera geometry
275 self.cameraDataLocation = None # path to camera geometry config file
276 self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
278 # Filter translation table
279 self.filters = None
281 # verify that the class variable packageName is set before attempting
282 # to instantiate an instance
283 if self.packageName is None:
284 raise ValueError('class variable packageName must not be None')
286 self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
288 # Assign a metadata translator if one has not been defined by
289 # subclass. We can sometimes infer one from the RawVisitInfo
290 # class.
291 if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
292 self.translatorClass = self.makeRawVisitInfo.metadataTranslator
294 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
295 """Initialize mappings
297 For each of the dataset types that we want to be able to read, there
298 are methods that can be created to support them:
299 * map_<dataset> : determine the path for dataset
300 * std_<dataset> : standardize the retrieved dataset
301 * bypass_<dataset> : retrieve the dataset (bypassing the usual
302 retrieval machinery)
303 * query_<dataset> : query the registry
305 Besides the dataset types explicitly listed in the policy, we create
306 additional, derived datasets for additional conveniences,
307 e.g., reading the header of an image, retrieving only the size of a
308 catalog.
310 Parameters
311 ----------
312 policy : `lsst.daf.persistence.Policy`
313 Policy with per-camera defaults already merged
314 rootStorage : `Storage subclass instance`
315 Interface to persisted repository data.
316 calibStorage : `Storage subclass instance`
317 Interface to persisted calib repository data.
318 provided : `list` of `str`
319 Keys provided by the mapper
320 """
321 # Sub-dictionaries (for exposure/calibration/dataset types)
322 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
323 "obs_base", "ImageMappingDefaults.yaml", "policy"))
324 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
325 "obs_base", "ExposureMappingDefaults.yaml", "policy"))
326 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
327 "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
328 dsMappingPolicy = dafPersist.Policy()
330 # Mappings
331 mappingList = (
332 ("images", imgMappingPolicy, ImageMapping),
333 ("exposures", expMappingPolicy, ExposureMapping),
334 ("calibrations", calMappingPolicy, CalibrationMapping),
335 ("datasets", dsMappingPolicy, DatasetMapping)
336 )
337 self.mappings = dict()
338 for name, defPolicy, cls in mappingList:
339 if name in policy:
340 datasets = policy[name]
342 # Centrally-defined datasets
343 defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
344 if os.path.exists(defaultsPath):
345 datasets.merge(dafPersist.Policy(defaultsPath))
347 mappings = dict()
348 setattr(self, name, mappings)
349 for datasetType in datasets.names(True):
350 subPolicy = datasets[datasetType]
351 subPolicy.merge(defPolicy)
353 if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
354 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
355 subPolicy=subPolicy):
356 components = subPolicy.get('composite')
357 assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
358 disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
359 python = subPolicy['python']
360 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
361 disassembler=disassembler,
362 python=python,
363 dataId=dataId,
364 mapper=self)
365 for name, component in components.items():
366 butlerComposite.add(id=name,
367 datasetType=component.get('datasetType'),
368 setter=component.get('setter', None),
369 getter=component.get('getter', None),
370 subset=component.get('subset', False),
371 inputOnly=component.get('inputOnly', False))
372 return butlerComposite
373 setattr(self, "map_" + datasetType, compositeClosure)
374 # for now at least, don't set up any other handling for
375 # this dataset type.
376 continue
378 if name == "calibrations":
379 mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
380 provided=provided, dataRoot=rootStorage)
381 else:
382 mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
384 if datasetType in self.mappings:
385 raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
386 self.keyDict.update(mapping.keys())
387 mappings[datasetType] = mapping
388 self.mappings[datasetType] = mapping
389 if not hasattr(self, "map_" + datasetType):
390 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
391 return mapping.map(mapper, dataId, write)
392 setattr(self, "map_" + datasetType, mapClosure)
393 if not hasattr(self, "query_" + datasetType):
394 def queryClosure(format, dataId, mapping=mapping):
395 return mapping.lookup(format, dataId)
396 setattr(self, "query_" + datasetType, queryClosure)
397 if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
398 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
399 return mapping.standardize(mapper, item, dataId)
400 setattr(self, "std_" + datasetType, stdClosure)
402 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
403 """Set convenience methods on CameraMapper"""
404 mapName = "map_" + datasetType + "_" + suffix
405 bypassName = "bypass_" + datasetType + "_" + suffix
406 queryName = "query_" + datasetType + "_" + suffix
407 if not hasattr(self, mapName):
408 setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
409 if not hasattr(self, bypassName):
410 if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
411 bypassImpl = getattr(self, "bypass_" + datasetType)
412 if bypassImpl is not None:
413 setattr(self, bypassName, bypassImpl)
414 if not hasattr(self, queryName):
415 setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
417 # Filename of dataset
418 setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
419 [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
420 # Metadata from FITS file
421 if subPolicy["storage"] == "FitsStorage": # a FITS image
422 def getMetadata(datasetType, pythonType, location, dataId):
423 md = readMetadata(location.getLocationsWithRoot()[0])
424 fix_header(md, translator_class=self.translatorClass)
425 return md
427 setMethods("md", bypassImpl=getMetadata)
429 # Add support for configuring FITS compression
430 addName = "add_" + datasetType
431 if not hasattr(self, addName):
432 setattr(self, addName, self.getImageCompressionSettings)
434 if name == "exposures":
435 def getSkyWcs(datasetType, pythonType, location, dataId):
436 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
437 return fitsReader.readWcs()
439 setMethods("wcs", bypassImpl=getSkyWcs)
441 def getRawHeaderWcs(datasetType, pythonType, location, dataId):
442 """Create a SkyWcs from the un-modified raw
443 FITS WCS header keys."""
444 if datasetType[:3] != "raw":
445 raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
446 datasetType, dataId)
447 return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))
449 setMethods("header_wcs", bypassImpl=getRawHeaderWcs)
451 def getPhotoCalib(datasetType, pythonType, location, dataId):
452 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
453 return fitsReader.readPhotoCalib()
455 setMethods("photoCalib", bypassImpl=getPhotoCalib)
457 def getVisitInfo(datasetType, pythonType, location, dataId):
458 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
459 return fitsReader.readVisitInfo()
461 setMethods("visitInfo", bypassImpl=getVisitInfo)
463 # TODO: deprecate in DM-27170, remove in DM-27177
464 def getFilter(datasetType, pythonType, location, dataId):
465 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
466 return fitsReader.readFilter()
468 setMethods("filter", bypassImpl=getFilter)
470 # TODO: deprecate in DM-27177, remove in DM-27811
471 def getFilterLabel(datasetType, pythonType, location, dataId):
472 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
473 storedFilter = fitsReader.readFilterLabel()
475 # Apply standardization used by full Exposure
476 try:
477 # mapping is local to enclosing scope
478 idFilter = mapping.need(['filter'], dataId)['filter']
479 except dafPersist.NoResults:
480 idFilter = None
481 bestFilter = self._getBestFilter(storedFilter, idFilter)
482 if bestFilter is not None:
483 return bestFilter
484 else:
485 return storedFilter
487 setMethods("filterLabel", bypassImpl=getFilterLabel)
489 setMethods("detector",
490 mapImpl=lambda dataId, write=False:
491 dafPersist.ButlerLocation(
492 pythonType="lsst.afw.cameraGeom.CameraConfig",
493 cppType="Config",
494 storageName="Internal",
495 locationList="ignored",
496 dataId=dataId,
497 mapper=self,
498 storage=None,
499 ),
500 bypassImpl=lambda datasetType, pythonType, location, dataId:
501 self.camera[self._extractDetectorName(dataId)]
502 )
504 def getBBox(datasetType, pythonType, location, dataId):
505 md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
506 fix_header(md, translator_class=self.translatorClass)
507 return afwImage.bboxFromMetadata(md)
509 setMethods("bbox", bypassImpl=getBBox)
511 elif name == "images":
512 def getBBox(datasetType, pythonType, location, dataId):
513 md = readMetadata(location.getLocationsWithRoot()[0])
514 fix_header(md, translator_class=self.translatorClass)
515 return afwImage.bboxFromMetadata(md)
516 setMethods("bbox", bypassImpl=getBBox)
518 if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
520 def getMetadata(datasetType, pythonType, location, dataId):
521 md = readMetadata(os.path.join(location.getStorage().root,
522 location.getLocations()[0]), hdu=1)
523 fix_header(md, translator_class=self.translatorClass)
524 return md
526 setMethods("md", bypassImpl=getMetadata)
528 # Sub-images
529 if subPolicy["storage"] == "FitsStorage":
530 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
531 subId = dataId.copy()
532 del subId['bbox']
533 loc = mapping.map(mapper, subId, write)
534 bbox = dataId['bbox']
535 llcX = bbox.getMinX()
536 llcY = bbox.getMinY()
537 width = bbox.getWidth()
538 height = bbox.getHeight()
539 loc.additionalData.set('llcX', llcX)
540 loc.additionalData.set('llcY', llcY)
541 loc.additionalData.set('width', width)
542 loc.additionalData.set('height', height)
543 if 'imageOrigin' in dataId:
544 loc.additionalData.set('imageOrigin',
545 dataId['imageOrigin'])
546 return loc
548 def querySubClosure(key, format, dataId, mapping=mapping):
549 subId = dataId.copy()
550 del subId['bbox']
551 return mapping.lookup(format, subId)
552 setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
554 if subPolicy["storage"] == "FitsCatalogStorage":
555 # Length of catalog
557 def getLen(datasetType, pythonType, location, dataId):
558 md = readMetadata(os.path.join(location.getStorage().root,
559 location.getLocations()[0]), hdu=1)
560 fix_header(md, translator_class=self.translatorClass)
561 return md["NAXIS2"]
563 setMethods("len", bypassImpl=getLen)
565 # Schema of catalog
566 if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
567 setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
568 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
569 location.getLocations()[0])))
571 def _computeCcdExposureId(self, dataId):
572 """Compute the 64-bit (long) identifier for a CCD exposure.
574 Subclasses must override
576 Parameters
577 ----------
578 dataId : `dict`
579 Data identifier with visit, ccd.
580 """
581 raise NotImplementedError()
583 def _computeCoaddExposureId(self, dataId, singleFilter):
584 """Compute the 64-bit (long) identifier for a coadd.
586 Subclasses must override
588 Parameters
589 ----------
590 dataId : `dict`
591 Data identifier with tract and patch.
592 singleFilter : `bool`
593 True means the desired ID is for a single-filter coadd, in which
594 case dataId must contain filter.
595 """
596 raise NotImplementedError()
598 def _search(self, path):
599 """Search for path in the associated repository's storage.
601 Parameters
602 ----------
603 path : string
604 Path that describes an object in the repository associated with
605 this mapper.
606 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
607 indicator will be stripped when searching and so will match
608 filenames without the HDU indicator, e.g. 'foo.fits'. The path
609 returned WILL contain the indicator though, e.g. ['foo.fits[1]'].
611 Returns
612 -------
613 string
614 The path for this object in the repository. Will return None if the
615 object can't be found. If the input argument path contained an HDU
616 indicator, the returned path will also contain the HDU indicator.
617 """
618 return self.rootStorage.search(path)
620 def backup(self, datasetType, dataId):
621 """Rename any existing object with the given type and dataId.
623 The CameraMapper implementation saves objects in a sequence of e.g.:
625 - foo.fits
626 - foo.fits~1
627 - foo.fits~2
629 All of the backups will be placed in the output repo, however, and will
630 not be removed if they are found elsewhere in the _parent chain. This
631 means that the same file will be stored twice if the previous version
632 was found in an input repo.
633 """
635 # Calling PosixStorage directly is not the long term solution in this
636 # function, this is work-in-progress on epic DM-6225. The plan is for
637 # parentSearch to be changed to 'search', and search only the storage
638 # associated with this mapper. All searching of parents will be handled
639 # by traversing the container of repositories in Butler.
641 def firstElement(list):
642 """Get the first element in the list, or None if that can't be
643 done.
644 """
645 return list[0] if list is not None and len(list) else None
647 n = 0
648 newLocation = self.map(datasetType, dataId, write=True)
649 newPath = newLocation.getLocations()[0]
650 path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
651 path = firstElement(path)
652 oldPaths = []
653 while path is not None:
654 n += 1
655 oldPaths.append((n, path))
656 path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
657 path = firstElement(path)
658 for n, oldPath in reversed(oldPaths):
659 self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
661 def keys(self):
662 """Return supported keys.
664 Returns
665 -------
666 iterable
667 List of keys usable in a dataset identifier
668 """
669 return iter(self.keyDict.keys())
671 def getKeys(self, datasetType, level):
672 """Return a dict of supported keys and their value types for a given
673 dataset type at a given level of the key hierarchy.
675 Parameters
676 ----------
677 datasetType : `str`
678 Dataset type or None for all dataset types.
679 level : `str` or None
680 Level or None for all levels or '' for the default level for the
681 camera.
683 Returns
684 -------
685 `dict`
686 Keys are strings usable in a dataset identifier, values are their
687 value types.
688 """
690 # not sure if this is how we want to do this. what if None was
691 # intended?
692 if level == '':
693 level = self.getDefaultLevel()
695 if datasetType is None:
696 keyDict = copy.copy(self.keyDict)
697 else:
698 keyDict = self.mappings[datasetType].keys()
699 if level is not None and level in self.levels:
700 keyDict = copy.copy(keyDict)
701 for lev in self.levels[level]:
702 if lev in keyDict:
703 del keyDict[lev]
704 return keyDict
706 def getDefaultLevel(self):
707 return self.defaultLevel
709 def getDefaultSubLevel(self, level):
710 if level in self.defaultSubLevels:
711 return self.defaultSubLevels[level]
712 return None
714 @classmethod
715 def getCameraName(cls):
716 """Return the name of the camera that this CameraMapper is for."""
717 className = str(cls)
718 className = className[className.find('.'):-1]
719 m = re.search(r'(\w+)Mapper', className)
720 if m is None:
721 m = re.search(r"class '[\w.]*?(\w+)'", className)
722 name = m.group(1)
723 return name[:1].lower() + name[1:] if name else ''
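    # Illustrative example (hypothetical subclass name):
    #
    #     class MegacamMapper(CameraMapper):
    #         pass
    #
    #     MegacamMapper.getCameraName()   # -> "megacam"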
725 @classmethod
726 def getPackageName(cls):
727 """Return the name of the package containing this CameraMapper."""
728 if cls.packageName is None:
729 raise ValueError('class variable packageName must not be None')
730 return cls.packageName
732 @classmethod
733 def getGen3Instrument(cls):
734 """Return the gen3 Instrument class equivalent for this gen2 Mapper.
736 Returns
737 -------
738 instr : `type`
739 A `~lsst.obs.base.Instrument` class.
740 """
741 if cls._gen3instrument is None:
742 raise NotImplementedError("Please provide a specific implementation for your instrument"
743 " to enable conversion of this gen2 repository to gen3")
744 if isinstance(cls._gen3instrument, str):
745 # Given a string to convert to an instrument class
746 cls._gen3instrument = doImport(cls._gen3instrument)
747 if not issubclass(cls._gen3instrument, Instrument):
748 raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
749 " but that is not an lsst.obs.base.Instrument")
750 return cls._gen3instrument
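    # Illustrative sketch (hypothetical names): subclasses typically declare the
    # gen3 counterpart either as a class object or as a fully-qualified string
    # that is imported lazily, e.g.
    #
    #     class MyCamMapper(CameraMapper):
    #         _gen3instrument = "lsst.obs.mycam.MyCamInstrument"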
752 @classmethod
753 def getPackageDir(cls):
754 """Return the base directory of this package"""
755 return getPackageDir(cls.getPackageName())
757 def map_camera(self, dataId, write=False):
758 """Map a camera dataset."""
759 if self.camera is None:
760 raise RuntimeError("No camera dataset available.")
761 actualId = self._transformId(dataId)
762 return dafPersist.ButlerLocation(
763 pythonType="lsst.afw.cameraGeom.CameraConfig",
764 cppType="Config",
765 storageName="ConfigStorage",
766 locationList=self.cameraDataLocation or "ignored",
767 dataId=actualId,
768 mapper=self,
769 storage=self.rootStorage
770 )
772 def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
773 """Return the (preloaded) camera object.
774 """
775 if self.camera is None:
776 raise RuntimeError("No camera dataset available.")
777 return self.camera
779 def map_expIdInfo(self, dataId, write=False):
780 return dafPersist.ButlerLocation(
781 pythonType="lsst.obs.base.ExposureIdInfo",
782 cppType=None,
783 storageName="Internal",
784 locationList="ignored",
785 dataId=dataId,
786 mapper=self,
787 storage=self.rootStorage
788 )
790 def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
791 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
792 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
793 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
794 return ExposureIdInfo(expId=expId, expBits=expBits)
796 def std_bfKernel(self, item, dataId):
797 """Disable standardization for bfKernel
799 bfKernel is a calibration product that is a numpy array,
800 unlike other calibration products that are all images;
801 all calibration images are sent through _standardizeExposure
802 due to CalibrationMapping, but we don't want that to happen to bfKernel
803 """
804 return item
806 def std_raw(self, item, dataId):
807 """Standardize a raw dataset by converting it to an Exposure instead
808 of an Image"""
809 return self._standardizeExposure(self.exposures['raw'], item, dataId,
810 trimmed=False, setVisitInfo=True)
812 def map_skypolicy(self, dataId):
813 """Map a sky policy."""
814 return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
815 "Internal", None, None, self,
816 storage=self.rootStorage)
818 def std_skypolicy(self, item, dataId):
819 """Standardize a sky policy by returning the one we use."""
820 return self.skypolicy
822###############################################################################
823#
824# Utility functions
825#
826###############################################################################
828 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
829 posixIfNoSql=True):
830 """Set up a registry (usually SQLite3), trying a number of possible
831 paths.
833 Parameters
834 ----------
835 name : string
836 Name of registry.
837 description : `str`
838 Description of registry (for log messages)
839 path : string
840 Path for registry.
841 policy : string
842 Policy that contains the registry name, used if path is None.
843 policyKey : string
844 Key in policy for registry path.
845 storage : Storage subclass
846 Repository Storage to look in.
847 searchParents : bool, optional
848 True if the search for a registry should follow any Butler v1
849 _parent symlinks.
850 posixIfNoSql : bool, optional
851 If an sqlite registry is not found, will create a posix registry if
852 this is True.
854 Returns
855 -------
856 lsst.daf.persistence.Registry
857 Registry object
858 """
859 if path is None and policyKey in policy:
860 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
861 if os.path.isabs(path):
862 raise RuntimeError("Policy should not indicate an absolute path for registry.")
863 if not storage.exists(path):
864 newPath = storage.instanceSearch(path)
866 newPath = newPath[0] if newPath is not None and len(newPath) else None
867 if newPath is None:
868 self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
869 path)
870 path = newPath
871 else:
872 self.log.warn("Unable to locate registry at policy path: %s", path)
873 path = None
875 # Old Butler API was to indicate the registry WITH the repo folder,
876 # New Butler expects the registry to be in the repo folder. To support
877 # Old API, check to see if path starts with root, and if so, strip
878 # root from path. Currently only works with PosixStorage
879 try:
880 root = storage.root
881 if path and (path.startswith(root)):
882 path = path[len(root + '/'):]
883 except AttributeError:
884 pass
886 # determine if there is an sqlite registry and if not, try the posix
887 # registry.
888 registry = None
890 def search(filename, description):
891 """Search for file in storage
893 Parameters
894 ----------
895 filename : `str`
896 Filename to search for
897 description : `str`
898 Description of file, for error message.
900 Returns
901 -------
902 path : `str` or `None`
903 Path to file, or None
904 """
905 result = storage.instanceSearch(filename)
906 if result:
907 return result[0]
908 self.log.debug("Unable to locate %s: %s", description, filename)
909 return None
911 # Search for a suitable registry database
912 if path is None:
913 path = search("%s.pgsql" % name, "%s in root" % description)
914 if path is None:
915 path = search("%s.sqlite3" % name, "%s in root" % description)
916 if path is None:
917 path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
919 if path is not None:
920 if not storage.exists(path):
921 newPath = storage.instanceSearch(path)
922 newPath = newPath[0] if newPath is not None and len(newPath) else None
923 if newPath is not None:
924 path = newPath
925 localFileObj = storage.getLocalFile(path)
926 self.log.info("Loading %s registry from %s", description, localFileObj.name)
927 registry = dafPersist.Registry.create(localFileObj.name)
928 localFileObj.close()
929 elif not registry and posixIfNoSql:
930 try:
931 self.log.info("Loading Posix %s registry from %s", description, storage.root)
932 registry = dafPersist.PosixRegistry(storage.root)
933 except Exception:
934 registry = None
936 return registry
938 def _transformId(self, dataId):
939 """Generate a standard ID dict from a camera-specific ID dict.
941 Canonical keys include:
942 - amp: amplifier name
943 - ccd: CCD name (in LSST this is a combination of raft and sensor)
944 The default implementation returns a copy of its input.
946 Parameters
947 ----------
948 dataId : `dict`
949 Dataset identifier; this must not be modified
951 Returns
952 -------
953 `dict`
954 Transformed dataset identifier.
955 """
957 return dataId.copy()
959 def _mapActualToPath(self, template, actualId):
960 """Convert a template path to an actual path, using the actual data
961 identifier. This implementation is usually sufficient but can be
962 overridden by the subclass.
964 Parameters
965 ----------
966 template : `str`
967 Template path
968 actualId : `dict`
969 Dataset identifier
971 Returns
972 -------
973 `str`
974 Pathname
975 """
977 try:
978 transformedId = self._transformId(actualId)
979 return template % transformedId
980 except Exception as e:
981 raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
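    # Illustrative example (hypothetical template and data ID): with the default
    # _transformId this is plain %-style substitution, e.g.
    #
    #     self._mapActualToPath("raw/v%(visit)07d_f%(filter)s.fits",
    #                           {"visit": 903334, "filter": "r"})
    #     # -> "raw/v0903334_fr.fits"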
983 @staticmethod
984 def getShortCcdName(ccdName):
985 """Convert a CCD name to a form useful as a filename
987 The default implementation converts spaces to underscores.
988 """
989 return ccdName.replace(" ", "_")
991 def _extractDetectorName(self, dataId):
992 """Extract the detector (CCD) name from the dataset identifier.
994 The name in question is the detector name used by lsst.afw.cameraGeom.
996 Parameters
997 ----------
998 dataId : `dict`
999 Dataset identifier.
1001 Returns
1002 -------
1003 `str`
1004 Detector name
1005 """
1006 raise NotImplementedError("No _extractDetectorName() function specified")
1008 def _setAmpDetector(self, item, dataId, trimmed=True):
1009 """Set the detector object in an Exposure for an amplifier.
1011 Defects are also added to the Exposure based on the detector object.
1013 Parameters
1014 ----------
1015 item : `lsst.afw.image.Exposure`
1016 Exposure to set the detector in.
1017 dataId : `dict`
1018 Dataset identifier
1019 trimmed : `bool`
1020 Should detector be marked as trimmed? (ignored)
1021 """
1023 return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1025 def _setCcdDetector(self, item, dataId, trimmed=True):
1026 """Set the detector object in an Exposure for a CCD.
1028 Parameters
1029 ----------
1030 item : `lsst.afw.image.Exposure`
1031 Exposure to set the detector in.
1032 dataId : `dict`
1033 Dataset identifier
1034 trimmed : `bool`
1035 Should detector be marked as trimmed? (ignored)
1036 """
1037 if item.getDetector() is not None:
1038 return
1040 detectorName = self._extractDetectorName(dataId)
1041 detector = self.camera[detectorName]
1042 item.setDetector(detector)
1044 @staticmethod
1045 def _resolveFilters(definitions, idFilter, filterLabel):
1046 """Identify the filter(s) consistent with partial filter information.
1048 Parameters
1049 ----------
1050 definitions : `lsst.obs.base.FilterDefinitionCollection`
1051 The filter definitions in which to search for filters.
1052 idFilter : `str` or `None`
1053 The filter information provided in a data ID.
1054 filterLabel : `lsst.afw.image.FilterLabel` or `None`
1055 The filter information provided by an exposure; may be incomplete.
1057 Returns
1058 -------
1059 filters : `set` [`lsst.obs.base.FilterDefinition`]
1060 The set of filters consistent with ``idFilter``
1061 and ``filterLabel``.
1062 """
1063 # Assume none of the filter constraints are actually wrong/contradictory.
1064 # Then taking the intersection of all constraints will give a unique
1065 # result if one exists.
1066 matches = set(definitions)
1067 if idFilter is not None:
1068 matches.intersection_update(definitions.findAll(idFilter))
1069 if filterLabel is not None and filterLabel.hasPhysicalLabel():
1070 matches.intersection_update(definitions.findAll(filterLabel.physicalLabel))
1071 if filterLabel is not None and filterLabel.hasBandLabel():
1072 matches.intersection_update(definitions.findAll(filterLabel.bandLabel))
1073 return matches
1075 def _getBestFilter(self, storedLabel, idFilter):
1076 """Estimate the most complete filter information consistent with the
1077 file or registry.
1079 Parameters
1080 ----------
1081 storedLabel : `lsst.afw.image.FilterLabel` or `None`
1082 The filter previously stored in the file.
1083 idFilter : `str` or `None`
1084 The filter implied by the data ID, if any.
1086 Returns
1087 -------
1088 bestFilter : `lsst.afw.image.FilterLabel` or `None`
1089 The complete filter to describe the dataset. May be equal to
1090 ``storedLabel``. `None` if no recommendation can be generated.
1091 """
1092 try:
1093 # getGen3Instrument returns class; need to construct it.
1094 filterDefinitions = self.getGen3Instrument()().filterDefinitions
1095 except NotImplementedError:
1096 filterDefinitions = None
1098 if filterDefinitions is not None:
1099 definitions = self._resolveFilters(filterDefinitions, idFilter, storedLabel)
1100 self.log.debug("Matching filters for id=%r and label=%r are %s.",
1101 idFilter, storedLabel, definitions)
1102 if len(definitions) == 1:
1103 newLabel = list(definitions)[0].makeFilterLabel()
1104 return newLabel
1105 elif definitions:
1106 self.log.warn("Multiple matches for filter %r with data ID %r.", storedLabel, idFilter)
1107 # Can we at least add a band?
1108 # Never expect multiple definitions with same physical filter.
1109 bands = {d.band for d in definitions} # None counts as separate result!
1110 if len(bands) == 1 and storedLabel is None:
1111 band = list(bands)[0]
1112 return afwImage.FilterLabel(band=band)
1113 else:
1114 return None
1115 else:
1116 # Unknown filter, nothing to be done.
1117 self.log.warn("Cannot reconcile filter %r with data ID %r.", storedLabel, idFilter)
1118 return None
1120 # Not practical to recommend a FilterLabel without filterDefinitions
1122 return None
1124 def _setFilter(self, mapping, item, dataId):
1125 """Set the filter information in an Exposure.
1127 The Exposure should already have had a filter loaded, but the reader
1128 (in ``afw``) had to act on incomplete information. This method
1129 cross-checks the filter against the data ID and the standard list
1130 of filters.
1132 Parameters
1133 ----------
1134 mapping : `lsst.obs.base.Mapping`
1135 Where to get the data ID filter from.
1136 item : `lsst.afw.image.Exposure`
1137 Exposure to set the filter in.
1138 dataId : `dict`
1139 Dataset identifier.
1140 """
1141 if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
1142 or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
1143 return
1145 itemFilter = item.getFilterLabel() # may be None
1146 try:
1147 idFilter = mapping.need(['filter'], dataId)['filter']
1148 except dafPersist.NoResults:
1149 idFilter = None
1151 bestFilter = self._getBestFilter(itemFilter, idFilter)
1152 if bestFilter is not None:
1153 if bestFilter != itemFilter:
1154 item.setFilterLabel(bestFilter)
1155 # Already using bestFilter, avoid unnecessary edits
1156 elif itemFilter is None:
1157 # Old Filter cleanup, without the benefit of FilterDefinition
1158 if self.filters is not None and idFilter in self.filters:
1159 idFilter = self.filters[idFilter]
1160 try:
1161 # TODO: remove in DM-27177; at that point may not be able
1162 # to process IDs without FilterDefinition.
1163 with warnings.catch_warnings():
1164 warnings.filterwarnings("ignore", category=FutureWarning)
1165 item.setFilter(afwImage.Filter(idFilter))
1166 except pexExcept.NotFoundError:
1167 self.log.warn("Filter %s not defined. Set to UNKNOWN.", idFilter)
1169 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1170 trimmed=True, setVisitInfo=True):
1171 """Default standardization function for images.
1173 This sets the Detector from the camera geometry
1174 and optionally set the Filter. In both cases this saves
1175 having to persist some data in each exposure (or image).
1177 Parameters
1178 ----------
1179 mapping : `lsst.obs.base.Mapping`
1180 Where to get the values from.
1181 item : image-like object
1182 Can be any of lsst.afw.image.Exposure,
1183 lsst.afw.image.DecoratedImage, lsst.afw.image.Image
1184 or lsst.afw.image.MaskedImage
1186 dataId : `dict`
1187 Dataset identifier
1188 filter : `bool`
1189 Set filter? Ignored if item is already an exposure
1190 trimmed : `bool`
1191 Should detector be marked as trimmed?
1192 setVisitInfo : `bool`
1193 Should Exposure have its VisitInfo filled out from the metadata?
1195 Returns
1196 -------
1197 `lsst.afw.image.Exposure`
1198 The standardized Exposure.
1199 """
1200 try:
1201 exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
1202 setVisitInfo=setVisitInfo)
1203 except Exception as e:
1204 self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
1205 raise
1207 if mapping.level.lower() == "amp":
1208 self._setAmpDetector(exposure, dataId, trimmed)
1209 elif mapping.level.lower() == "ccd":
1210 self._setCcdDetector(exposure, dataId, trimmed)
1212 # We can only create a WCS if it doesn't already have one and
1213 # we have either a VisitInfo or exposure metadata.
1214 # Do not calculate a WCS if this is an amplifier exposure
1215 if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
1216 (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
1217 self._createInitialSkyWcs(exposure)
1219 if filter:
1220 self._setFilter(mapping, exposure, dataId)
1222 return exposure
1224 def _createSkyWcsFromMetadata(self, exposure):
1225 """Create a SkyWcs from the FITS header metadata in an Exposure.
1227 Parameters
1228 ----------
1229 exposure : `lsst.afw.image.Exposure`
1230 The exposure to get metadata from, and attach the SkyWcs to.
1231 """
1232 metadata = exposure.getMetadata()
1233 fix_header(metadata, translator_class=self.translatorClass)
1234 try:
1235 wcs = afwGeom.makeSkyWcs(metadata, strip=True)
1236 exposure.setWcs(wcs)
1237 except pexExcept.TypeError as e:
1238 # See DM-14372 for why this is debug and not warn (e.g. calib
1239 # files without wcs metadata).
1240 self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
1241 " %s", e.args[0])
1242 # ensure any WCS values stripped from the metadata are removed in the
1243 # exposure
1244 exposure.setMetadata(metadata)
1246 def _createInitialSkyWcs(self, exposure):
1247 """Create a SkyWcs from the boresight and camera geometry.
1249 If the boresight or camera geometry do not support this method of
1250 WCS creation, this falls back on the header metadata-based version
1251 (typically a purely linear FITS crval/crpix/cdmatrix WCS).
1253 Parameters
1254 ----------
1255 exposure : `lsst.afw.image.Exposure`
1256 The exposure to get data from, and attach the SkyWcs to.
1257 """
1258 # Always try to use metadata first, to strip WCS keys from it.
1259 self._createSkyWcsFromMetadata(exposure)
1261 if exposure.getInfo().getVisitInfo() is None:
1262 msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
1263 self.log.warn(msg)
1264 return
1265 try:
1266 newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
1267 exposure.setWcs(newSkyWcs)
1268 except InitialSkyWcsError as e:
1269 msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
1270 self.log.warn(msg, e)
1271 self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
1272 if e.__context__ is not None:
1273 self.log.debug("Root-cause Exception was: %s",
1274 traceback.TracebackException.from_exception(e.__context__))
1276 def _makeCamera(self, policy, repositoryDir):
1277 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
1278 the camera geometry
1280 Also set self.cameraDataLocation, if relevant (else it can be left
1281 None).
1283 This implementation assumes that policy contains an entry "camera"
1284 that points to the subdirectory in this package of camera data;
1285 specifically, that subdirectory must contain:
1286 - a file named `camera.py` that contains persisted camera config
1287 - ampInfo table FITS files, as required by
1288 lsst.afw.cameraGeom.makeCameraFromPath
1290 Parameters
1291 ----------
1292 policy : `lsst.daf.persistence.Policy`
1293 Policy with per-camera defaults already merged
1294 (PexPolicy only for backward compatibility).
1295 repositoryDir : `str`
1296 Policy repository for the subclassing module (obtained with
1297 getRepositoryPath() on the per-camera default dictionary).
1298 """
1299 if 'camera' not in policy:
1300 raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
1301 cameraDataSubdir = policy['camera']
1302 self.cameraDataLocation = os.path.normpath(
1303 os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
1304 cameraConfig = afwCameraGeom.CameraConfig()
1305 cameraConfig.load(self.cameraDataLocation)
1306 ampInfoPath = os.path.dirname(self.cameraDataLocation)
1307 return afwCameraGeom.makeCameraFromPath(
1308 cameraConfig=cameraConfig,
1309 ampInfoPath=ampInfoPath,
1310 shortNameFunc=self.getShortCcdName,
1311 pupilFactoryClass=self.PupilFactoryClass
1312 )
1314 def getRegistry(self):
1315 """Get the registry used by this mapper.
1317 Returns
1318 -------
1319 Registry or None
1320 The registry used by this mapper for this mapper's repository.
1321 """
1322 return self.registry
1324 def getImageCompressionSettings(self, datasetType, dataId):
1325 """Stuff image compression settings into a daf.base.PropertySet
1327 This goes into the ButlerLocation's "additionalData", which gets
1328 passed into the boost::persistence framework.
1330 Parameters
1331 ----------
1332 datasetType : `str`
1333 Type of dataset for which to get the image compression settings.
1334 dataId : `dict`
1335 Dataset identifier.
1337 Returns
1338 -------
1339 additionalData : `lsst.daf.base.PropertySet`
1340 Image compression settings.
1341 """
1342 mapping = self.mappings[datasetType]
1343 recipeName = mapping.recipe
1344 storageType = mapping.storage
1345 if storageType not in self._writeRecipes:
1346 return dafBase.PropertySet()
1347 if recipeName not in self._writeRecipes[storageType]:
1348 raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1349 (datasetType, storageType, recipeName))
1350 recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1351 seed = hash(tuple(dataId.items())) % 2**31
1352 for plane in ("image", "mask", "variance"):
1353 if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1354 recipe.set(plane + ".scaling.seed", seed)
1355 return recipe
1357 def _initWriteRecipes(self):
1358 """Read the recipes for writing files
1360 These recipes are currently used for configuring FITS compression,
1361 but they could have wider uses for configuring different flavors
1362 of the storage types. A recipe is referred to by a symbolic name,
1363 which has associated settings. These settings are stored as a
1364 `PropertySet` so they can easily be passed down to the
1365 boost::persistence framework as the "additionalData" parameter.
1367 The list of recipes is written in YAML. A default recipe and
1368 some other convenient recipes are in obs_base/policy/writeRecipes.yaml
1369 and these may be overridden or supplemented by the individual obs_*
1370 packages' own policy/writeRecipes.yaml files.
1372 Recipes are grouped by the storage type. Currently, only the
1373 ``FitsStorage`` storage type uses recipes, which it uses to
1374 configure FITS image compression.
1376 Each ``FitsStorage`` recipe for FITS compression should define
1377 "image", "mask" and "variance" entries, each of which may contain
1378 "compression" and "scaling" entries. Defaults will be provided for
1379 any missing elements under "compression" and "scaling".
1381 The allowed entries under "compression" are:
1383 * algorithm (string): compression algorithm to use
1384 * rows (int): number of rows per tile (0 = entire dimension)
1385 * columns (int): number of columns per tile (0 = entire dimension)
1386 * quantizeLevel (float): cfitsio quantization level
1388 The allowed entries under "scaling" are:
1390 * algorithm (string): scaling algorithm to use
1391 * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
1392 * fuzz (bool): fuzz the values when quantising floating-point values?
1393 * seed (long): seed for random number generator when fuzzing
1394 * maskPlanes (list of string): mask planes to ignore when doing
1395 statistics
1396 * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
1397 * quantizePad: number of stdev to allow on the low side (for
1398 STDEV_POSITIVE/NEGATIVE)
1399 * bscale: manually specified BSCALE (for MANUAL scaling)
1400 * bzero: manually specified BZERO (for MANUAL scaling)
1402 A very simple example YAML recipe:
1404 FitsStorage:
1405 default:
1406 image: &default
1407 compression:
1408 algorithm: GZIP_SHUFFLE
1409 mask: *default
1410 variance: *default
1411 """
1412 recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
1413 recipes = dafPersist.Policy(recipesFile)
1414 supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
1415 validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
1416 if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
1417 supplements = dafPersist.Policy(supplementsFile)
1418 # Don't allow overrides, only supplements
1419 for entry in validationMenu:
1420 intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1421 if intersection:
1422 raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
1423 (supplementsFile, entry, recipesFile, intersection))
1424 recipes.update(supplements)
1426 self._writeRecipes = {}
1427 for storageType in recipes.names(True):
1428 if "default" not in recipes[storageType]:
1429 raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
1430 (storageType, recipesFile))
1431 self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1434def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
1435 """Generate an Exposure from an image-like object
1437 If the image is a DecoratedImage then also set its WCS and metadata
1438 (Image and MaskedImage are missing the necessary metadata
1439 and Exposure already has those set)
1441 Parameters
1442 ----------
1443 image : Image-like object
1444 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1445 Exposure.
1447 Returns
1448 -------
1449 `lsst.afw.image.Exposure`
1450 Exposure containing input image.
1451 """
1452 translatorClass = None
1453 if mapper is not None:
1454 translatorClass = mapper.translatorClass
1456 metadata = None
1457 if isinstance(image, afwImage.MaskedImage):
1458 exposure = afwImage.makeExposure(image)
1459 elif isinstance(image, afwImage.DecoratedImage):
1460 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1461 metadata = image.getMetadata()
1462 fix_header(metadata, translator_class=translatorClass)
1463 exposure.setMetadata(metadata)
1464 elif isinstance(image, afwImage.Exposure):
1465 exposure = image
1466 metadata = exposure.getMetadata()
1467 fix_header(metadata, translator_class=translatorClass)
1468 else: # Image
1469 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1471 # set VisitInfo if we can
1472 if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1473 if metadata is not None:
1474 if mapper is None:
1475 if not logger:
1476 logger = lsstLog.Log.getLogger("CameraMapper")
1477 logger.warn("I can only set the VisitInfo if you provide a mapper")
1478 else:
1479 exposureId = mapper._computeCcdExposureId(dataId)
1480 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1482 exposure.getInfo().setVisitInfo(visitInfo)
1484 return exposure
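# Illustrative usage sketch (not part of this module): wrapping a bare afw image
# in an Exposure with exposureFromImage.
#
#     import lsst.afw.image as afwImage
#     img = afwImage.ImageF(10, 10)
#     exp = exposureFromImage(img, setVisitInfo=False)
#     assert isinstance(exp, afwImage.ExposureF)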
1487def validateRecipeFitsStorage(recipes):
1488 """Validate recipes for FitsStorage
1490 The recipes are supplemented with default values where appropriate.
1492 TODO: replace this custom validation code with Cerberus (DM-11846)
1494 Parameters
1495 ----------
1496 recipes : `lsst.daf.persistence.Policy`
1497 FitsStorage recipes to validate.
1499 Returns
1500 -------
1501 validated : `lsst.daf.base.PropertySet`
1502 Validated FitsStorage recipe.
1504 Raises
1505 ------
1506 `RuntimeError`
1507 If validation fails.
1508 """
1509 # Schemas define what should be there, and the default values (and by the
1510 # default value, the expected type).
1511 compressionSchema = {
1512 "algorithm": "NONE",
1513 "rows": 1,
1514 "columns": 0,
1515 "quantizeLevel": 0.0,
1516 }
1517 scalingSchema = {
1518 "algorithm": "NONE",
1519 "bitpix": 0,
1520 "maskPlanes": ["NO_DATA"],
1521 "seed": 0,
1522 "quantizeLevel": 4.0,
1523 "quantizePad": 5.0,
1524 "fuzz": True,
1525 "bscale": 1.0,
1526 "bzero": 0.0,
1527 }
1529 def checkUnrecognized(entry, allowed, description):
1530 """Check to see if the entry contains unrecognised keywords"""
1531 unrecognized = set(entry.keys()) - set(allowed)
1532 if unrecognized:
1533 raise RuntimeError(
1534 "Unrecognized entries when parsing image compression recipe %s: %s" %
1535 (description, unrecognized))
1537 validated = {}
1538 for name in recipes.names(True):
1539 checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1540 rr = dafBase.PropertySet()
1541 validated[name] = rr
1542 for plane in ("image", "mask", "variance"):
1543 checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1544 name + "->" + plane)
1546 for settings, schema in (("compression", compressionSchema),
1547 ("scaling", scalingSchema)):
1548 prefix = plane + "." + settings
1549 if settings not in recipes[name][plane]:
1550 for key in schema:
1551 rr.set(prefix + "." + key, schema[key])
1552 continue
1553 entry = recipes[name][plane][settings]
1554 checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1555 for key in schema:
1556 value = type(schema[key])(entry[key]) if key in entry else schema[key]
1557 rr.set(prefix + "." + key, value)
1558 return validated