Coverage for python/lsst/obs/base/cameraMapper.py : 9%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import warnings
import weakref

from astro_metadata_translator import fix_header
from lsst.utils import doImport, getPackageDir
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]

class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them. This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory. Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it. See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs. Each CCD is in turn composed of one or more amplifiers
    (amps). A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images. The first is a registry of all raw exposures. This must contain
    the time of the observation. One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries. The second is an optional registry of all calibration
    data. This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: The `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values. If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making it suitable for path expansion
    (e.g. removing commas). The default implementation does nothing. Note
    that this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename. The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it from
        the calibration registry.

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    # Gen3 instrument corresponding to this mapper
    # Can be a class or a string with the full name of the class
    _gen3instrument = None
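
    # A minimal concrete-subclass sketch (all names here are hypothetical;
    # a real mapper must also supply a policy file and the abstract methods
    # documented above):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #         _gen3instrument = "lsst.obs.mycam.MyCam"
    #
    #         def _extractDetectorName(self, dataId):
    #             return "CCD%(ccd)02d" % dataId
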
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not, and if it is
        # indicated in the policy, use that. Otherwise the calibs are in the
        # regular root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by the
        # subclass. We can sometimes infer one from the MakeRawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings.

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged.
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data.
        provided : `list` of `str`
            Keys provided by the mapper.
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling
                        # for this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p)
                                for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            # TODO: deprecate in DM-27170, remove in DM-27177
                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            # TODO: deprecate in DM-27177, remove in DM-27811
                            def getFilterLabel(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilterLabel()

                            setMethods("filterLabel", bypassImpl=getFilterLabel)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
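
    # As a concrete illustration (dataset type and data-ID keys hypothetical):
    # a "calexp" entry under the 'exposures' subpolicy yields generated
    # methods such as map_calexp, query_calexp and std_calexp, plus derived
    # conveniences like map_calexp_filename, bypass_calexp_md,
    # bypass_calexp_wcs, bypass_calexp_visitInfo and map_calexp_sub, all
    # reachable through the Butler, e.g.
    #
    #     md = butler.get("calexp_md", visit=903334, ccd=23)
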
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()
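
    # A typical override sketch (the bit packing and field widths here are
    # hypothetical; the real packing is camera-specific):
    #
    #     def _computeCcdExposureId(self, dataId):
    #         pathId = self._transformId(dataId)
    #         return 200*int(pathId['visit']) + int(pathId['ccd'])
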
    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case ``dataId`` must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(seq):
            """Get the first element in the sequence, or None if that can't
            be done.
            """
            return seq[0] if seq is not None and len(seq) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier.
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''
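
    # For example, a hypothetical lsst.obs.mycam.myCamMapper.MyCamMapper
    # subclass yields m.group(1) == "myCam" from the r'(\w+)Mapper' search,
    # so this method returns "myCam".
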
    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
        """
        if cls._gen3instrument is None:
            raise NotImplementedError("Please provide a specific implementation for your instrument"
                                      " to enable conversion of this gen2 repository to gen3")
        if isinstance(cls._gen3instrument, str):
            # Given a string to convert to an instrument class
            cls._gen3instrument = doImport(cls._gen3instrument)
        if not issubclass(cls._gen3instrument, Instrument):
            raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
                             " but that is not an lsst.obs.base.Instrument")
        return cls._gen3instrument

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package."""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel.

        bfKernel is a calibration product that is a numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to
        bfKernel.
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : string
            Description of registry (for log messages).
        path : string
            Path for registry.
        policy : Policy
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object.
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage.

            Parameters
            ----------
            filename : `str`
                Filename to search for.
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None.
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:

        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)

        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified.

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()
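
    # An override sketch (key names hypothetical): a camera whose users say
    # "ccdname" but whose path templates use "ccd" might write
    #
    #     def _transformId(self, dataId):
    #         actualId = dataId.copy()
    #         if 'ccdname' in actualId:
    #             actualId['ccd'] = actualId.pop('ccdname').replace(",", "")
    #         return actualId
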
    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path.
        actualId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Pathname.
        """

        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
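
    # For example (template and data ID hypothetical), a policy template
    #
    #     "raw/v%(visit)07d/%(ccd)s.fits"
    #
    # combined with {"visit": 903334, "ccd": "1_53"} expands via Python
    # %-formatting to "raw/v0903334/1_53.fits".
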
    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename.

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")
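
    # For example, getShortCcdName("R:1,2 S:3,4") returns "R:1,2_S:3,4".
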
    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name.
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    @staticmethod
    def _resolveFilters(definitions, idFilter, filterLabel):
        """Identify the filter(s) consistent with partial filter information.

        Parameters
        ----------
        definitions : `lsst.obs.base.FilterDefinitionCollection`
            The filter definitions in which to search for filters.
        idFilter : `str` or `None`
            The filter information provided in a data ID.
        filterLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter information provided by an exposure; may be incomplete.

        Returns
        -------
        filters : `set` [`lsst.obs.base.FilterDefinition`]
            The set of filters consistent with ``idFilter``
            and ``filterLabel``.
        """
        # Assume none of the filter constraints are actually wrong or
        # contradictory. Then taking the intersection of all constraints
        # will give a unique result if one exists.
        matches = set(definitions)
        if idFilter is not None:
            matches.intersection_update(definitions.findAll(idFilter))
        if filterLabel is not None and filterLabel.hasPhysicalLabel():
            matches.intersection_update(definitions.findAll(filterLabel.physicalLabel))
        if filterLabel is not None and filterLabel.hasBandLabel():
            matches.intersection_update(definitions.findAll(filterLabel.bandLabel))
        return matches

    def _setFilter(self, mapping, item, dataId):
        """Set the filter information in an Exposure.

        The Exposure should already have had a filter loaded, but the reader
        (in ``afw``) had to act on incomplete information. This method
        cross-checks the filter against the data ID and the standard list
        of filters.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the data ID filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        try:
            # getGen3Instrument returns class; need to construct it.
            filterDefinitions = self.getGen3Instrument()().filterDefinitions
        except NotImplementedError:
            filterDefinitions = None
        itemFilter = item.getFilterLabel()  # may be None
        try:
            idFilter = mapping.need(['filter'], dataId)['filter']
        except dafPersist.NoResults:
            idFilter = None

        if filterDefinitions is not None:
            definitions = self._resolveFilters(filterDefinitions, idFilter, itemFilter)
            self.log.debug("Matching filters for id=%r and label=%r are %s.",
                           idFilter, itemFilter, definitions)
            if len(definitions) == 1:
                newLabel = list(definitions)[0].makeFilterLabel()
                if newLabel != itemFilter:
                    item.setFilterLabel(newLabel)
            elif definitions:
                self.log.warn("Multiple matches for filter %r with data ID %r.", itemFilter, idFilter)
                # Can we at least add a band?
                # Never expect multiple definitions with same physical filter.
                bands = {d.band for d in definitions}  # None counts as separate result!
                if len(bands) == 1 and itemFilter is None:
                    band = list(bands)[0]
                    item.setFilterLabel(afwImage.FilterLabel(band=band))
            else:
                # Unknown filter, nothing to be done.
                self.log.warn("Cannot reconcile filter %r with data ID %r.", itemFilter, idFilter)
        else:
            if itemFilter is None:
                # Old Filter cleanup, without the benefit of FilterDefinition
                if self.filters is not None and idFilter in self.filters:
                    idFilter = self.filters[idFilter]
                try:
                    # TODO: remove in DM-27177; at that point may not be able
                    # to process IDs without FilterDefinition.
                    with warnings.catch_warnings():
                        warnings.filterwarnings("ignore", category=FutureWarning)
                        item.setFilter(afwImage.Filter(idFilter))
                except pexExcept.NotFoundError:
                    self.log.warn("Filter %s not defined. Set to UNKNOWN.", idFilter)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage.
        dataId : `dict`
            Dataset identifier.
        filter : `bool`
            Set filter? Ignored if item is already an exposure.
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s", item, e)
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:

        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet.

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files.

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object.

    If the image is a DecoratedImage then also set its metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set).

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier; used to compute the exposure ID when a mapper is
        provided.
    mapper : `CameraMapper`, optional
        Mapper used to compute the exposure ID and construct the VisitInfo.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default is created if needed.
    setVisitInfo : `bool`, optional
        If True, attempt to fill out the VisitInfo from the metadata.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)

                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
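
# A usage sketch (file name hypothetical): wrap a bare image in an Exposure
# without trying to fill in the VisitInfo:
#
#     image = afwImage.ImageF("pixels.fits")
#     exposure = exposureFromImage(image, setVisitInfo=False)
#     assert isinstance(exposure, afwImage.ExposureF)

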
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage.

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` [`str`, `lsst.daf.base.PropertySet`]
        Validated FitsStorage recipes, keyed by recipe name.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognized keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated