Coverage for python/lsst/obs/base/cameraMapper.py: 9%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import warnings
import weakref
from deprecated.sphinx import deprecated

from astro_metadata_translator import fix_header
from lsst.utils import doImport
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from lsst.utils import getPackageDir
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):
53 """CameraMapper is a base class for mappers that handle images from a
54 camera and products derived from them. This provides an abstraction layer
55 between the data on disk and the code.
57 Public methods: keys, queryMetadata, getDatasetTypes, map,
58 canStandardize, standardize
60 Mappers for specific data sources (e.g., CFHT Megacam, LSST
61 simulations, etc.) should inherit this class.
63 The CameraMapper manages datasets within a "root" directory. Note that
64 writing to a dataset present in the input root will hide the existing
65 dataset but not overwrite it. See #2160 for design discussion.
67 A camera is assumed to consist of one or more rafts, each composed of
68 multiple CCDs. Each CCD is in turn composed of one or more amplifiers
69 (amps). A camera is also assumed to have a camera geometry description
70 (CameraGeom object) as a policy file, a filter description (Filter class
71 static configuration) as another policy file.
73 Information from the camera geometry and defects are inserted into all
74 Exposure objects returned.
76 The mapper uses one or two registries to retrieve metadata about the
77 images. The first is a registry of all raw exposures. This must contain
78 the time of the observation. One or more tables (or the equivalent)
79 within the registry are used to look up data identifier components that
80 are not specified by the user (e.g. filter) and to return results for
81 metadata queries. The second is an optional registry of all calibration
82 data. This should contain validity start and end entries for each
83 calibration dataset in the same timescale as the observation time.
85 Subclasses will typically set MakeRawVisitInfoClass and optionally the
86 metadata translator class:
88 MakeRawVisitInfoClass: a class variable that points to a subclass of
89 MakeRawVisitInfo, a functor that creates an
90 lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
92 translatorClass: The `~astro_metadata_translator.MetadataTranslator`
93 class to use for fixing metadata values. If it is not set an attempt
94 will be made to infer the class from ``MakeRawVisitInfoClass``, failing
95 that the metadata fixup will try to infer the translator class from the
96 header itself.
98 Subclasses must provide the following methods:
100 _extractDetectorName(self, dataId): returns the detector name for a CCD
101 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
102 a dataset identifier referring to that CCD or a subcomponent of it.
104 _computeCcdExposureId(self, dataId): see below
106 _computeCoaddExposureId(self, dataId, singleFilter): see below
108 Subclasses may also need to override the following methods:
110 _transformId(self, dataId): transformation of a data identifier
111 from colloquial usage (e.g., "ccdname") to proper/actual usage
112 (e.g., "ccd"), including making suitable for path expansion (e.g. removing
113 commas). The default implementation does nothing. Note that this
114 method should not modify its input parameter.
116 getShortCcdName(self, ccdName): a static method that returns a shortened
117 name suitable for use as a filename. The default version converts spaces
118 to underscores.
120 _mapActualToPath(self, template, actualId): convert a template path to an
121 actual path, using the actual dataset identifier.
123 The mapper's behaviors are largely specified by the policy file.
124 See the MapperDictionary.paf for descriptions of the available items.
126 The 'exposures', 'calibrations', and 'datasets' subpolicies configure
127 mappings (see Mappings class).
129 Common default mappings for all subclasses can be specified in the
130 "policy/{images,exposures,calibrations,datasets}.yaml" files. This
131 provides a simple way to add a product to all camera mappers.
133 Functions to map (provide a path to the data given a dataset
134 identifier dictionary) and standardize (convert data into some standard
135 format or type) may be provided in the subclass as "map_{dataset type}"
136 and "std_{dataset type}", respectively.
138 If non-Exposure datasets cannot be retrieved using standard
139 daf_persistence methods alone, a "bypass_{dataset type}" function may be
140 provided in the subclass to return the dataset instead of using the
141 "datasets" subpolicy.
143 Implementations of map_camera and bypass_camera that should typically be
144 sufficient are provided in this base class.
146 Notes
147 -----
148 .. todo::
150 Instead of auto-loading the camera at construction time, load it from
151 the calibration registry
153 Parameters
154 ----------
155 policy : daf_persistence.Policy,
156 Policy with per-camera defaults already merged.
157 repositoryDir : string
158 Policy repository for the subclassing module (obtained with
159 getRepositoryPath() on the per-camera default dictionary).
160 root : string, optional
161 Path to the root directory for data.
162 registry : string, optional
163 Path to registry with data's metadata.
164 calibRoot : string, optional
165 Root directory for calibrations.
166 calibRegistry : string, optional
167 Path to registry with calibrations' metadata.
168 provided : list of string, optional
169 Keys provided by the mapper.
170 parentRegistry : Registry subclass, optional
171 Registry from a parent repository that may be used to look up
172 data's metadata.
173 repositoryCfg : daf_persistence.RepositoryCfg or None, optional
174 The configuration information for the repository this mapper is
175 being used with.
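
    Examples
    --------
    A minimal subclass sketch; the package name, policy file, and the
    exposure-ID packing below are illustrative assumptions, not a real
    instrument::

        class ExampleMapper(CameraMapper):
            packageName = "obs_example"  # hypothetical package

            def __init__(self, **kwargs):
                policyFile = dafPersist.Policy.defaultPolicyFile(
                    "obs_example", "ExampleMapper.yaml", "policy")
                policy = dafPersist.Policy(policyFile)
                super().__init__(policy, os.path.dirname(policyFile), **kwargs)

            def _extractDetectorName(self, dataId):
                return "ccd%(ccd)02d" % dataId

            def _computeCcdExposureId(self, dataId):
                # Pack visit and ccd into one integer; the 6-bit ccd field
                # here is an arbitrary choice for this sketch.
                return dataId["visit"] * 64 + dataId["ccd"]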
176 """
    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # Class to use for metadata translations
    translatorClass = None

    # Gen3 instrument corresponding to this mapper
    # Can be a class or a string with the full name of the class
    _gen3instrument = None

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated in
        # the policy, use that. And otherwise, the calibs are in the regular
        # root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
            if calibStorage is None:
                calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
            retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping)
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling for
                        # this dataset type.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage,
                                      provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            # TODO: remove in DM-27177
                            @deprecated(reason="Replaced with getFilterLabel. Will be removed after v22.",
                                        category=FutureWarning, version="v22")
                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            # TODO: deprecate in DM-27177, remove in DM-27811
                            def getFilterLabel(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                storedFilter = fitsReader.readFilterLabel()

                                # Apply standardization used by full Exposure
                                try:
                                    # mapping is local to enclosing scope
                                    idFilter = mapping.need(['filter'], dataId)['filter']
                                except dafPersist.NoResults:
                                    idFilter = None
                                bestFilter = self._getBestFilter(storedFilter, idFilter)
                                if bestFilter is not None:
                                    return bestFilter
                                else:
                                    return storedFilter

                            setMethods("filterLabel", bypassImpl=getFilterLabel)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog

                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog

                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
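
        For example, a subclass might pack the ID as ``visit * nCcd + ccd``,
        where ``nCcd`` is the number of detectors (illustrative; real
        packings are instrument-specific).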
583 """
584 raise NotImplementedError()
586 def _computeCoaddExposureId(self, dataId, singleFilter):
587 """Compute the 64-bit (long) identifier for a coadd.
589 Subclasses must override
591 Parameters
592 ----------
593 dataId : `dict`
594 Data identifier with tract and patch.
595 singleFilter : `bool`
596 True means the desired ID is for a single-filter coadd, in which
597 case dataIdmust contain filter.
598 """
599 raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long term solution in this
        # function, this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
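
        For example, ``mapper.getKeys("raw", "sensor")`` might return
        ``{'visit': int, 'filter': str}`` after the keys configured for the
        ``sensor`` level are removed (key names and types here are
        illustrative).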
691 """
693 # not sure if this is how we want to do this. what if None was
694 # intended?
695 if level == '':
696 level = self.getDefaultLevel()
698 if datasetType is None:
699 keyDict = copy.copy(self.keyDict)
700 else:
701 keyDict = self.mappings[datasetType].keys()
702 if level is not None and level in self.levels:
703 keyDict = copy.copy(keyDict)
704 for lev in self.levels[level]:
705 if lev in keyDict:
706 del keyDict[lev]
707 return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
        """
        if cls._gen3instrument is None:
            raise NotImplementedError("Please provide a specific implementation for your instrument"
                                      " to enable conversion of this gen2 repository to gen3")
        if isinstance(cls._gen3instrument, str):
            # Given a string to convert to an instrument class
            cls._gen3instrument = doImport(cls._gen3instrument)
        if not issubclass(cls._gen3instrument, Instrument):
            raise ValueError(f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
                             " but that is not an lsst.obs.base.Instrument")
        return cls._gen3instrument

    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array,
        unlike other calibration products that are all images;
        all calibration images are sent through _standardizeExposure
        due to CalibrationMapping, but we don't want that to happen to
        bfKernel
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True, setExposureId=True)

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

###############################################################################
#
# Utility functions
#
###############################################################################

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
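
        For example, with ``name="registry"`` this method looks for
        ``registry.pgsql`` and then ``registry.sqlite3`` in the repository
        root (and the current directory) before optionally falling back to a
        Posix registry.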
861 """
862 if path is None and policyKey in policy:
863 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
864 if os.path.isabs(path):
865 raise RuntimeError("Policy should not indicate an absolute path for registry.")
866 if not storage.exists(path):
867 newPath = storage.instanceSearch(path)
869 newPath = newPath[0] if newPath is not None and len(newPath) else None
870 if newPath is None:
871 self.log.warning("Unable to locate registry at policy path (also looked in root): %s",
872 path)
873 path = newPath
874 else:
875 self.log.warning("Unable to locate registry at policy path: %s", path)
876 path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
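
        For example, with the default ``_transformId`` (the template and
        values are illustrative)::

            >>> mapper._mapActualToPath("raw/v%(visit)07d/c%(ccd)02d.fits",
            ...                         {"visit": 123, "ccd": 4})
            'raw/v0000123/c04.fits'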
978 """
980 try:
981 transformedId = self._transformId(actualId)
982 return template % transformedId
983 except Exception as e:
984 raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
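
        For example::

            >>> CameraMapper.getShortCcdName("R:1,2 S:3,4")
            'R:1,2_S:3,4'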
991 """
992 return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    @staticmethod
    def _resolveFilters(definitions, idFilter, filterLabel):
        """Identify the filter(s) consistent with partial filter information.

        Parameters
        ----------
        definitions : `lsst.obs.base.FilterDefinitionCollection`
            The filter definitions in which to search for filters.
        idFilter : `str` or `None`
            The filter information provided in a data ID.
        filterLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter information provided by an exposure; may be incomplete.

        Returns
        -------
        filters : `set` [`lsst.obs.base.FilterDefinition`]
            The set of filters consistent with ``idFilter``
            and ``filterLabel``.
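
        For example, a band-only label can be narrowed using the data ID
        (a sketch; the filter names are illustrative)::

            matches = CameraMapper._resolveFilters(
                definitions, idFilter="i2",
                filterLabel=afwImage.FilterLabel(band="i"))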
1065 """
1066 # Assume none of the filter constraints actually wrong/contradictory.
1067 # Then taking the intersection of all constraints will give a unique
1068 # result if one exists.
1069 matches = set(definitions)
1070 if idFilter is not None:
1071 matches.intersection_update(definitions.findAll(idFilter))
1072 if filterLabel is not None and filterLabel.hasPhysicalLabel():
1073 matches.intersection_update(definitions.findAll(filterLabel.physicalLabel))
1074 if filterLabel is not None and filterLabel.hasBandLabel():
1075 matches.intersection_update(definitions.findAll(filterLabel.bandLabel))
1076 return matches

    def _getBestFilter(self, storedLabel, idFilter):
        """Estimate the most complete filter information consistent with the
        file or registry.

        Parameters
        ----------
        storedLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter previously stored in the file.
        idFilter : `str` or `None`
            The filter implied by the data ID, if any.

        Returns
        -------
        bestFilter : `lsst.afw.image.FilterLabel` or `None`
            The complete filter to describe the dataset. May be equal to
            ``storedLabel``. `None` if no recommendation can be generated.
        """
        try:
            # getGen3Instrument returns class; need to construct it.
            filterDefinitions = self.getGen3Instrument()().filterDefinitions
        except NotImplementedError:
            filterDefinitions = None

        if filterDefinitions is not None:
            definitions = self._resolveFilters(filterDefinitions, idFilter, storedLabel)
            self.log.debug("Matching filters for id=%r and label=%r are %s.",
                           idFilter, storedLabel, definitions)
            if len(definitions) == 1:
                newLabel = list(definitions)[0].makeFilterLabel()
                return newLabel
            elif definitions:
                # Some instruments have many filters for the same band, of
                # which one is known by band name and the others always by
                # afw name (e.g., i, i2).
                nonAfw = {f for f in definitions if f.afw_name is None}
                if len(nonAfw) == 1:
                    newLabel = list(nonAfw)[0].makeFilterLabel()
                    self.log.debug("Assuming %r is the correct match.", newLabel)
                    return newLabel

                self.log.warning("Multiple matches for filter %r with data ID %r.", storedLabel, idFilter)
                # Can we at least add a band?
                # Never expect multiple definitions with same physical filter.
                bands = {d.band for d in definitions}  # None counts as separate result!
                if len(bands) == 1 and storedLabel is None:
                    band = list(bands)[0]
                    return afwImage.FilterLabel(band=band)
                else:
                    return None
            else:
                # Unknown filter, nothing to be done.
                self.log.warning("Cannot reconcile filter %r with data ID %r.", storedLabel, idFilter)
                return None

        # Not practical to recommend a FilterLabel without filterDefinitions

        return None

    def _setFilter(self, mapping, item, dataId):
        """Set the filter information in an Exposure.

        The Exposure should already have had a filter loaded, but the reader
        (in ``afw``) had to act on incomplete information. This method
        cross-checks the filter against the data ID and the standard list
        of filters.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the data ID filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        itemFilter = item.getFilterLabel()  # may be None
        try:
            idFilter = mapping.need(['filter'], dataId)['filter']
        except dafPersist.NoResults:
            idFilter = None

        bestFilter = self._getBestFilter(itemFilter, idFilter)
        if bestFilter is not None:
            if bestFilter != itemFilter:
                item.setFilterLabel(bestFilter)
            # Already using bestFilter, avoid unnecessary edits
        elif itemFilter is None:
            # Old Filter cleanup, without the benefit of FilterDefinition
            if self.filters is not None and idFilter in self.filters:
                idFilter = self.filters[idFilter]
            try:
                # TODO: remove in DM-27177; at that point may not be able
                # to process IDs without FilterDefinition.
                with warnings.catch_warnings():
                    warnings.filterwarnings("ignore", category=FutureWarning)
                    item.setFilter(afwImage.Filter(idFilter))
            except pexExcept.NotFoundError:
                self.log.warning("Filter %s not defined. Set to UNKNOWN.", idFilter)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True, setExposureId=False):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage

        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?
        setExposureId : `bool`
            Should Exposure have its exposure ID filled out from the data ID?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo, setFilter=filter,
                                         setExposureId=setExposureId)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s", item, e)
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warning(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warning(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
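
        For example (a sketch; assumes a ``calexp`` mapping stored with
        ``FitsStorage``)::

            settings = mapper.getImageCompressionSettings("calexp", dataId)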
1356 """
1357 mapping = self.mappings[datasetType]
1358 recipeName = mapping.recipe
1359 storageType = mapping.storage
1360 if storageType not in self._writeRecipes:
1361 return dafBase.PropertySet()
1362 if recipeName not in self._writeRecipes[storageType]:
1363 raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1364 (datasetType, storageType, recipeName))
1365 recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1366 seed = hash(tuple(dataId.items())) % 2**31
1367 for plane in ("image", "mask", "variance"):
1368 if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1369 recipe.set(plane + ".scaling.seed", seed)
1370 return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True, setFilter=False,
                      setExposureId=False):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        The data ID identifying the visit of the image.
    mapper : `lsst.obs.base.CameraMapper`, optional
        The mapper with which to convert the image.
    logger : `lsst.log.Log`, optional
        An existing logger to which to send output.
    setVisitInfo : `bool`, optional
        If `True`, create and attach a `lsst.afw.image.VisitInfo` to the
        result. Ignored if ``image`` is an `~lsst.afw.image.Exposure` with an
        existing ``VisitInfo``.
    setFilter : `bool`, optional
        If `True`, create and attach a `lsst.afw.image.FilterLabel` to the
        result. Converts non-``FilterLabel`` information provided in ``image``.
        Ignored if ``image`` is an `~lsst.afw.image.Exposure` with existing
        filter information.
    setExposureId : `bool`, optional
        If `True`, create and set an exposure ID from ``dataId``. Ignored if
        ``image`` is an `~lsst.afw.image.Exposure` with an existing ID.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
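
    For example, a minimal call that wraps a bare `lsst.afw.image.Image`
    (a sketch; without a mapper no VisitInfo, filter, or exposure ID is
    attached)::

        exposure = exposureFromImage(image)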
1485 """
1486 translatorClass = None
1487 if mapper is not None:
1488 translatorClass = mapper.translatorClass
1490 metadata = None
1491 if isinstance(image, afwImage.MaskedImage):
1492 exposure = afwImage.makeExposure(image)
1493 elif isinstance(image, afwImage.DecoratedImage):
1494 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1495 metadata = image.getMetadata()
1496 fix_header(metadata, translator_class=translatorClass)
1497 exposure.setMetadata(metadata)
1498 elif isinstance(image, afwImage.Exposure):
1499 exposure = image
1500 metadata = exposure.getMetadata()
1501 fix_header(metadata, translator_class=translatorClass)
1502 else: # Image
1503 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1505 # set exposure ID if we can
1506 if setExposureId and not exposure.info.hasId() and mapper is not None:
1507 try:
1508 exposureId = mapper._computeCcdExposureId(dataId)
1509 exposure.info.id = exposureId
1510 except NotImplementedError:
1511 logger.warning("Could not set exposure ID; mapper does not support it.")
1513 if metadata is not None:
1514 # set filter if we can
1515 if setFilter and mapper is not None and exposure.getFilterLabel() is None:
1516 # Translate whatever was in the metadata
1517 if 'FILTER' in metadata:
1518 oldFilter = metadata['FILTER']
1519 idFilter = dataId['filter'] if 'filter' in dataId else None
1520 # oldFilter may not be physical, but _getBestFilter always goes
1521 # through the FilterDefinitions instead of returning
1522 # unvalidated input.
1523 filter = mapper._getBestFilter(afwImage.FilterLabel(physical=oldFilter), idFilter)
1524 if filter is not None:
1525 exposure.setFilterLabel(filter)
1526 # set VisitInfo if we can
1527 if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1528 if mapper is None:
1529 if not logger:
1530 logger = lsstLog.Log.getLogger("CameraMapper")
1531 logger.warn("I can only set the VisitInfo if you provide a mapper")
1532 else:
1533 exposureId = mapper._computeCcdExposureId(dataId)
1534 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1536 exposure.getInfo().setVisitInfo(visitInfo)
1538 return exposure


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` of `lsst.daf.base.PropertySet`
        Validated FitsStorage recipes, keyed by recipe name.

    Raises
    ------
    `RuntimeError`
        If validation fails.
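
    For example, validating the ``FitsStorage`` section of a recipes policy
    (a sketch; the file name is illustrative)::

        recipes = dafPersist.Policy("writeRecipes.yaml")
        validated = validateRecipeFitsStorage(recipes["FitsStorage"])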
1562 """
1563 # Schemas define what should be there, and the default values (and by the
1564 # default value, the expected type).
1565 compressionSchema = {
1566 "algorithm": "NONE",
1567 "rows": 1,
1568 "columns": 0,
1569 "quantizeLevel": 0.0,
1570 }
1571 scalingSchema = {
1572 "algorithm": "NONE",
1573 "bitpix": 0,
1574 "maskPlanes": ["NO_DATA"],
1575 "seed": 0,
1576 "quantizeLevel": 4.0,
1577 "quantizePad": 5.0,
1578 "fuzz": True,
1579 "bscale": 1.0,
1580 "bzero": 0.0,
1581 }
1583 def checkUnrecognized(entry, allowed, description):
1584 """Check to see if the entry contains unrecognised keywords"""
1585 unrecognized = set(entry.keys()) - set(allowed)
1586 if unrecognized:
1587 raise RuntimeError(
1588 "Unrecognized entries when parsing image compression recipe %s: %s" %
1589 (description, unrecognized))
1591 validated = {}
1592 for name in recipes.names(True):
1593 checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1594 rr = dafBase.PropertySet()
1595 validated[name] = rr
1596 for plane in ("image", "mask", "variance"):
1597 checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
1598 name + "->" + plane)
1600 for settings, schema in (("compression", compressionSchema),
1601 ("scaling", scalingSchema)):
1602 prefix = plane + "." + settings
1603 if settings not in recipes[name][plane]:
1604 for key in schema:
1605 rr.set(prefix + "." + key, schema[key])
1606 continue
1607 entry = recipes[name][plane][settings]
1608 checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1609 for key in schema:
1610 value = type(schema[key])(entry[key]) if key in entry else schema[key]
1611 rr.set(prefix + "." + key, value)
1612 return validated