Coverage for python/lsst/obs/base/cameraMapper.py: 9%
606 statements

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

import copy
import os
import re
import traceback
import weakref

import lsst.afw.cameraGeom as afwCameraGeom
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.daf.base as dafBase
import lsst.daf.persistence as dafPersist
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from astro_metadata_translator import fix_header
from deprecated.sphinx import deprecated
from lsst.afw.fits import readMetadata
from lsst.afw.table import Schema
from lsst.utils import doImportType, getPackageDir

from ._instrument import Instrument
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .mapping import CalibrationMapping, DatasetMapping, ExposureMapping, ImageMapping
from .utils import InitialSkyWcsError, createInitialSkyWcs

__all__ = ["CameraMapper", "exposureFromImage"]


class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them.  This provides an abstraction
    layer between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory.  Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it.  See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs.  Each CCD is in turn composed of one or more amplifiers
    (amps).  A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects is inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images.  The first is a registry of all raw exposures.  This must contain
    the time of the observation.  One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries.  The second is an optional registry of all calibration
    data.  This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass and optionally the
    metadata translator class:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    translatorClass: the `~astro_metadata_translator.MetadataTranslator`
    class to use for fixing metadata values.  If it is not set, an attempt
    will be made to infer the class from ``MakeRawVisitInfoClass``; failing
    that, the metadata fixup will try to infer the translator class from the
    header itself.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making it suitable for path expansion (e.g. by
    removing commas).  The default implementation does nothing.  Note that
    this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename.  The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files.  This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    .. todo::

        Instead of auto-loading the camera at construction time, load it
        from the calibration registry.

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
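
    Examples
    --------
    A minimal subclass sketch (all names here are illustrative, not from any
    real obs package)::

        class MyCameraMapper(CameraMapper):
            packageName = "obs_mycamera"

            def _extractDetectorName(self, dataId):
                # detector name as used by the camera geometry
                return "R:%(raft)s S:%(sensor)s" % dataId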
175 """
177 packageName = None
179 # a class or subclass of MakeRawVisitInfo, a functor that makes an
180 # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
181 MakeRawVisitInfoClass = MakeRawVisitInfo
183 # a class or subclass of PupilFactory
184 PupilFactoryClass = afwCameraGeom.PupilFactory
186 # Class to use for metadata translations
187 translatorClass = None
189 # Gen3 instrument corresponding to this mapper
190 # Can be a class or a string with the full name of the class
191 _gen3instrument = None

    def __init__(
        self,
        policy,
        repositoryDir,
        root=None,
        registry=None,
        calibRoot=None,
        calibRegistry=None,
        provided=None,
        parentRegistry=None,
        repositoryCfg=None,
    ):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("lsst.CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if "levels" in policy:
            levelsPolicy = policy["levels"]
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy["defaultLevel"]
        self.defaultSubLevels = dict()
        if "defaultSubLevels" in policy:
            self.defaultSubLevels = policy["defaultSubLevels"]

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. And otherwise, the calibs are in the
        # regular root.
        # If the location indicated by the calib root does not exist, do not
        # create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot, create=False)
        else:
            calibRoot = policy.get("calibRoot", None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot, create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry(
            "registry",
            "exposure",
            registry,
            policy,
            "registryPath",
            self.rootStorage,
            searchParents=False,
            posixIfNoSql=(not parentRegistry),
        )
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get("needCalibRegistry", None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry(
                    "calibRegistry",
                    "calib",
                    calibRegistry,
                    policy,
                    "calibRegistryPath",
                    calibStorage,
                    posixIfNoSql=False,
                )  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}"
                )
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=provided)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # verify that the class variable packageName is set before attempting
        # to instantiate an instance
        if self.packageName is None:
            raise ValueError("class variable packageName must not be None")

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        # Assign a metadata translator if one has not been defined by
        # subclass. We can sometimes infer one from the RawVisitInfo
        # class.
        if self.translatorClass is None and hasattr(self.makeRawVisitInfo, "metadataTranslator"):
            self.translatorClass = self.makeRawVisitInfo.metadataTranslator

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings.

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets as conveniences, e.g., reading the
        header of an image, or retrieving only the size of a catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged.
        rootStorage : `Storage` subclass instance
            Interface to persisted repository data.
        calibStorage : `Storage` subclass instance
            Interface to persisted calib repository data.
        provided : `list` of `str`
            Keys provided by the mapper.
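
        Notes
        -----
        For example (the dataset type name is illustrative), a ``calexp``
        entry in the ``exposures`` subpolicy yields ``map_calexp``,
        ``std_calexp`` and ``query_calexp`` methods, plus derived component
        methods such as::

            mapper.map_calexp_filename(dataId)  # path(s) on disk
            mapper.map_calexp_md(dataId)        # FITS header metadata
            mapper.map_calexp_wcs(dataId)       # SkyWcs read from the file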
342 """
343 # Sub-dictionaries (for exposure/calibration/dataset types)
344 imgMappingPolicy = dafPersist.Policy(
345 dafPersist.Policy.defaultPolicyFile("obs_base", "ImageMappingDefaults.yaml", "policy")
346 )
347 expMappingPolicy = dafPersist.Policy(
348 dafPersist.Policy.defaultPolicyFile("obs_base", "ExposureMappingDefaults.yaml", "policy")
349 )
350 calMappingPolicy = dafPersist.Policy(
351 dafPersist.Policy.defaultPolicyFile("obs_base", "CalibrationMappingDefaults.yaml", "policy")
352 )
353 dsMappingPolicy = dafPersist.Policy()
355 # Mappings
356 mappingList = (
357 ("images", imgMappingPolicy, ImageMapping),
358 ("exposures", expMappingPolicy, ExposureMapping),
359 ("calibrations", calMappingPolicy, CalibrationMapping),
360 ("datasets", dsMappingPolicy, DatasetMapping),
361 )
362 self.mappings = dict()
363 for name, defPolicy, cls in mappingList:
364 if name in policy:
365 datasets = policy[name]
367 # Centrally-defined datasets
368 defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
369 if os.path.exists(defaultsPath):
370 datasets.merge(dafPersist.Policy(defaultsPath))
372 mappings = dict()
373 setattr(self, name, mappings)
374 for datasetType in datasets.names(True):
375 subPolicy = datasets[datasetType]
376 subPolicy.merge(defPolicy)
378 if not hasattr(self, "map_" + datasetType) and "composite" in subPolicy:
380 def compositeClosure(
381 dataId, write=False, mapper=None, mapping=None, subPolicy=subPolicy
382 ):
383 components = subPolicy.get("composite")
384 assembler = subPolicy["assembler"] if "assembler" in subPolicy else None
385 disassembler = subPolicy["disassembler"] if "disassembler" in subPolicy else None
386 python = subPolicy["python"]
387 butlerComposite = dafPersist.ButlerComposite(
388 assembler=assembler,
389 disassembler=disassembler,
390 python=python,
391 dataId=dataId,
392 mapper=self,
393 )
394 for name, component in components.items():
395 butlerComposite.add(
396 id=name,
397 datasetType=component.get("datasetType"),
398 setter=component.get("setter", None),
399 getter=component.get("getter", None),
400 subset=component.get("subset", False),
401 inputOnly=component.get("inputOnly", False),
402 )
403 return butlerComposite
405 setattr(self, "map_" + datasetType, compositeClosure)
406 # for now at least, don't set up any other handling for
407 # this dataset type.
408 continue
410 if name == "calibrations":
411 mapping = cls(
412 datasetType,
413 subPolicy,
414 self.registry,
415 self.calibRegistry,
416 calibStorage,
417 provided=provided,
418 dataRoot=rootStorage,
419 )
420 else:
421 mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
423 if datasetType in self.mappings:
424 raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
425 self.keyDict.update(mapping.keys())
426 mappings[datasetType] = mapping
427 self.mappings[datasetType] = mapping
428 if not hasattr(self, "map_" + datasetType):
430 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
431 return mapping.map(mapper, dataId, write)
433 setattr(self, "map_" + datasetType, mapClosure)
434 if not hasattr(self, "query_" + datasetType):
436 def queryClosure(format, dataId, mapping=mapping):
437 return mapping.lookup(format, dataId)
439 setattr(self, "query_" + datasetType, queryClosure)
440 if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
442 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
443 return mapping.standardize(mapper, item, dataId)
445 setattr(self, "std_" + datasetType, stdClosure)
447 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
448 """Set convenience methods on CameraMapper"""
449 mapName = "map_" + datasetType + "_" + suffix
450 bypassName = "bypass_" + datasetType + "_" + suffix
451 queryName = "query_" + datasetType + "_" + suffix
452 if not hasattr(self, mapName):
453 setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
454 if not hasattr(self, bypassName):
455 if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
456 bypassImpl = getattr(self, "bypass_" + datasetType)
457 if bypassImpl is not None:
458 setattr(self, bypassName, bypassImpl)
459 if not hasattr(self, queryName):
460 setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
462 # Filename of dataset
463 setMethods(
464 "filename",
465 bypassImpl=lambda datasetType, pythonType, location, dataId: [
466 os.path.join(location.getStorage().root, p) for p in location.getLocations()
467 ],
468 )
469 # Metadata from FITS file
470 if subPolicy["storage"] == "FitsStorage": # a FITS image
472 def getMetadata(datasetType, pythonType, location, dataId):
473 md = readMetadata(location.getLocationsWithRoot()[0])
474 fix_header(md, translator_class=self.translatorClass)
475 return md
477 setMethods("md", bypassImpl=getMetadata)
479 # Add support for configuring FITS compression
480 addName = "add_" + datasetType
481 if not hasattr(self, addName):
482 setattr(self, addName, self.getImageCompressionSettings)
484 if name == "exposures":
486 def getSkyWcs(datasetType, pythonType, location, dataId):
487 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
488 return fitsReader.readWcs()
490 setMethods("wcs", bypassImpl=getSkyWcs)
492 def getRawHeaderWcs(datasetType, pythonType, location, dataId):
493 """Create a SkyWcs from the un-modified raw
494 FITS WCS header keys."""
495 if datasetType[:3] != "raw":
496 raise dafPersist.NoResults(
497 "Can only get header WCS for raw exposures.", datasetType, dataId
498 )
499 return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))
501 setMethods("header_wcs", bypassImpl=getRawHeaderWcs)
503 def getPhotoCalib(datasetType, pythonType, location, dataId):
504 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
505 return fitsReader.readPhotoCalib()
507 setMethods("photoCalib", bypassImpl=getPhotoCalib)
509 def getVisitInfo(datasetType, pythonType, location, dataId):
510 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
511 return fitsReader.readVisitInfo()
513 setMethods("visitInfo", bypassImpl=getVisitInfo)
515 def getFilter(datasetType, pythonType, location, dataId):
516 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
517 storedFilter = fitsReader.readFilter()
519 # Apply standardization used by full Exposure
520 try:
521 # mapping is local to enclosing scope
522 idFilter = mapping.need(["filter"], dataId)["filter"]
523 except dafPersist.NoResults:
524 idFilter = None
525 bestFilter = self._getBestFilter(storedFilter, idFilter)
526 if bestFilter is not None:
527 return bestFilter
528 else:
529 return storedFilter
531 setMethods("filter", bypassImpl=getFilter)
533 # TODO: remove in DM-27811
534 @deprecated(
535 reason="Replaced by 'filter' component. Will be removed after v24.",
536 version="v24.0",
537 category=FutureWarning,
538 )
539 def getFilterLabel(datasetType, pythonType, location, dataId):
540 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
541 storedFilter = fitsReader.readFilterLabel()
543 # Apply standardization used by full Exposure
544 try:
545 # mapping is local to enclosing scope
546 idFilter = mapping.need(["filter"], dataId)["filter"]
547 except dafPersist.NoResults:
548 idFilter = None
549 bestFilter = self._getBestFilter(storedFilter, idFilter)
550 if bestFilter is not None:
551 return bestFilter
552 else:
553 return storedFilter
555 setMethods("filterLabel", bypassImpl=getFilterLabel)
557 setMethods(
558 "detector",
559 mapImpl=lambda dataId, write=False: dafPersist.ButlerLocation(
560 pythonType="lsst.afw.cameraGeom.CameraConfig",
561 cppType="Config",
562 storageName="Internal",
563 locationList="ignored",
564 dataId=dataId,
565 mapper=self,
566 storage=None,
567 ),
568 bypassImpl=lambda datasetType, pythonType, location, dataId: self.camera[
569 self._extractDetectorName(dataId)
570 ],
571 )
573 def getBBox(datasetType, pythonType, location, dataId):
574 md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
575 fix_header(md, translator_class=self.translatorClass)
576 return afwImage.bboxFromMetadata(md)
578 setMethods("bbox", bypassImpl=getBBox)
580 elif name == "images":
582 def getBBox(datasetType, pythonType, location, dataId):
583 md = readMetadata(location.getLocationsWithRoot()[0])
584 fix_header(md, translator_class=self.translatorClass)
585 return afwImage.bboxFromMetadata(md)
587 setMethods("bbox", bypassImpl=getBBox)
589 if subPolicy["storage"] == "FitsCatalogStorage": # a FITS catalog
591 def getMetadata(datasetType, pythonType, location, dataId):
592 md = readMetadata(
593 os.path.join(location.getStorage().root, location.getLocations()[0]), hdu=1
594 )
595 fix_header(md, translator_class=self.translatorClass)
596 return md
598 setMethods("md", bypassImpl=getMetadata)
600 # Sub-images
601 if subPolicy["storage"] == "FitsStorage":
603 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
604 subId = dataId.copy()
605 del subId["bbox"]
606 loc = mapping.map(mapper, subId, write)
607 bbox = dataId["bbox"]
608 llcX = bbox.getMinX()
609 llcY = bbox.getMinY()
610 width = bbox.getWidth()
611 height = bbox.getHeight()
612 loc.additionalData.set("llcX", llcX)
613 loc.additionalData.set("llcY", llcY)
614 loc.additionalData.set("width", width)
615 loc.additionalData.set("height", height)
616 if "imageOrigin" in dataId:
617 loc.additionalData.set("imageOrigin", dataId["imageOrigin"])
618 return loc
620 def querySubClosure(key, format, dataId, mapping=mapping):
621 subId = dataId.copy()
622 del subId["bbox"]
623 return mapping.lookup(format, subId)
625 setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
627 if subPolicy["storage"] == "FitsCatalogStorage":
628 # Length of catalog
630 def getLen(datasetType, pythonType, location, dataId):
631 md = readMetadata(
632 os.path.join(location.getStorage().root, location.getLocations()[0]), hdu=1
633 )
634 fix_header(md, translator_class=self.translatorClass)
635 return md["NAXIS2"]
637 setMethods("len", bypassImpl=getLen)
639 # Schema of catalog
640 if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
641 setMethods(
642 "schema",
643 bypassImpl=lambda datasetType, pythonType, location, dataId: Schema.readFits(
644 os.path.join(location.getStorage().root, location.getLocations()[0])
645 ),
646 )

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
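
        Notes
        -----
        A typical (purely illustrative) implementation packs the data ID
        into a single integer, e.g.::

            def _computeCcdExposureId(self, dataId):
                return 200 * dataId["visit"] + dataId["ccd"]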
657 """
658 raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case ``dataId`` must contain filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'.  The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'.  The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository.  Will return None if
            the object can't be found.  If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        # Calling PosixStorage directly is not the long-term solution in this
        # function; this is work-in-progress on epic DM-6225. The plan is for
        # parentSearch to be changed to 'search', and search only the storage
        # associated with this mapper. All searching of parents will be
        # handled by traversing the container of repositories in Butler.

        def firstElement(lst):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return lst[0] if lst is not None and len(lst) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))

    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier.
        """
        return iter(self.keyDict.keys())

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """

        # not sure if this is how we want to do this. what if None was
        # intended?
        if level == "":
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for lev in self.levels[level]:
                if lev in keyDict:
                    del keyDict[lev]
        return keyDict

    def getDefaultLevel(self):
        return self.defaultLevel

    def getDefaultSubLevel(self, level):
        if level in self.defaultSubLevels:
            return self.defaultSubLevels[level]
        return None

    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find(".") : -1]
        m = re.search(r"(\w+)Mapper", className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ""

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError("class variable packageName must not be None")
        return cls.packageName

    @classmethod
    def getGen3Instrument(cls):
        """Return the gen3 Instrument class equivalent for this gen2 Mapper.

        Returns
        -------
        instr : `type`
            A `~lsst.obs.base.Instrument` class.
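
        Notes
        -----
        Subclasses enable this by setting the ``_gen3instrument`` class
        variable to an `~lsst.obs.base.Instrument` subclass or its fully
        qualified name; e.g. (the class path shown is illustrative)::

            class MyCameraMapper(CameraMapper):
                _gen3instrument = "lsst.obs.mycamera.MyCamera"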
817 """
818 if cls._gen3instrument is None:
819 raise NotImplementedError(
820 "Please provide a specific implementation for your instrument"
821 " to enable conversion of this gen2 repository to gen3"
822 )
823 if isinstance(cls._gen3instrument, str):
824 # Given a string to convert to an instrument class
825 cls._gen3instrument = doImportType(cls._gen3instrument)
826 if not issubclass(cls._gen3instrument, Instrument):
827 raise ValueError(
828 f"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
829 " but that is not an lsst.obs.base.Instrument"
830 )
831 return cls._gen3instrument
833 @classmethod
834 def getPackageDir(cls):
835 """Return the base directory of this package"""
836 return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        actualId = self._transformId(dataId)
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=actualId,
            mapper=self,
            storage=self.rootStorage,
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage,
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure."""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)

    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel.

        bfKernel is a calibration product that is a numpy array, unlike other
        calibration products, which are all images; all calibration images
        are sent through _standardizeExposure due to CalibrationMapping, but
        we don't want that to happen to bfKernel.
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image."""
        return self._standardizeExposure(
            self.exposures["raw"], item, dataId, trimmed=False, setVisitInfo=True, setExposureId=True
        )

    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation(
            "lsst.pex.policy.Policy", "Policy", "Internal", None, None, self, storage=self.rootStorage
        )

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy

    ##########################################################################
    #
    # Utility functions
    #
    ##########################################################################

    def _setupRegistry(
        self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True
    ):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : `str`
            Description of registry (for log messages).
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object.
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warning(
                        "Unable to locate registry at policy path (also looked in root): %s", path
                    )
                path = newPath
            else:
                self.log.warning("Unable to locate registry at policy path: %s", path)
                path = None

        # Old Butler API was to indicate the registry WITH the repo folder,
        # New Butler expects the registry to be in the repo folder. To support
        # Old API, check to see if path starts with root, and if so, strip
        # root from path. Currently only works with PosixStorage
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + "/") :]
        except AttributeError:
            pass

        # determine if there is an sqlite registry and if not, try the posix
        # registry.
        registry = None

        def search(filename, description):
            """Search for file in storage.

            Parameters
            ----------
            filename : `str`
                Filename to search for.
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None.
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:

        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)

        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified.

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """

        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier.  This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path.
        actualId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Pathname.
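
        Examples
        --------
        With an illustrative template and data ID::

            template = "raw/v%(visit)07d/%(ccd)s.fits"
            actualId = {"visit": 123, "ccd": "R12_S01"}
            template % self._transformId(actualId)
            # -> "raw/v0000123/R12_S01.fits"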
1058 """
1060 try:
1061 transformedId = self._transformId(actualId)
1062 return template % transformedId
1063 except Exception as e:
1064 raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))

    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename.

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name.
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """

        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    @staticmethod
    def _resolveFilters(definitions, idFilter, filterLabel):
        """Identify the filter(s) consistent with partial filter information.

        Parameters
        ----------
        definitions : `lsst.obs.base.FilterDefinitionCollection`
            The filter definitions in which to search for filters.
        idFilter : `str` or `None`
            The filter information provided in a data ID.
        filterLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter information provided by an exposure; may be incomplete.

        Returns
        -------
        filters : `set` [`lsst.obs.base.FilterDefinition`]
            The set of filters consistent with ``idFilter``
            and ``filterLabel``.
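
        Examples
        --------
        An illustrative call: if ``definitions`` holds physical filters
        ``"i"`` and ``"i2"``, both mapping to band ``"i"``, then::

            matches = CameraMapper._resolveFilters(definitions, "i2", None)
            # only the "i2" definition survives the intersection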
1145 """
1146 # Assume none of the filter constraints actually wrong/contradictory.
1147 # Then taking the intersection of all constraints will give a unique
1148 # result if one exists.
1149 matches = set(definitions)
1150 if idFilter is not None:
1151 matches.intersection_update(definitions.findAll(idFilter))
1152 if filterLabel is not None and filterLabel.hasPhysicalLabel():
1153 matches.intersection_update(definitions.findAll(filterLabel.physicalLabel))
1154 if filterLabel is not None and filterLabel.hasBandLabel():
1155 matches.intersection_update(definitions.findAll(filterLabel.bandLabel))
1156 return matches

    def _getBestFilter(self, storedLabel, idFilter):
        """Estimate the most complete filter information consistent with the
        file or registry.

        Parameters
        ----------
        storedLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter previously stored in the file.
        idFilter : `str` or `None`
            The filter implied by the data ID, if any.

        Returns
        -------
        bestFilter : `lsst.afw.image.FilterLabel` or `None`
            The complete filter to describe the dataset.  May be equal to
            ``storedLabel``.  `None` if no recommendation can be generated.
        """
        try:
            # getGen3Instrument returns a class; need to construct it.
            filterDefinitions = self.getGen3Instrument()().filterDefinitions
        except NotImplementedError:
            filterDefinitions = None

        if filterDefinitions is not None:
            definitions = self._resolveFilters(filterDefinitions, idFilter, storedLabel)
            self.log.debug(
                "Matching filters for id=%r and label=%r are %s.", idFilter, storedLabel, definitions
            )
            if len(definitions) == 1:
                newLabel = list(definitions)[0].makeFilterLabel()
                return newLabel
            elif definitions:
                # Some instruments have many filters for the same band, of
                # which one is known by band name and the others always by
                # afw name (e.g., i, i2).
                nonAfw = {f for f in definitions if f.afw_name is None}
                if len(nonAfw) == 1:
                    newLabel = list(nonAfw)[0].makeFilterLabel()
                    self.log.debug("Assuming %r is the correct match.", newLabel)
                    return newLabel

                self.log.warning("Multiple matches for filter %r with data ID %r.", storedLabel, idFilter)
                # Can we at least add a band?
                # Never expect multiple definitions with same physical filter.
                bands = {d.band for d in definitions}  # None counts as a separate result!
                if len(bands) == 1 and storedLabel is None:
                    band = list(bands)[0]
                    return afwImage.FilterLabel(band=band)
                else:
                    return None
            else:
                # Unknown filter, nothing to be done.
                self.log.warning("Cannot reconcile filter %r with data ID %r.", storedLabel, idFilter)
                return None

        # Not practical to recommend a FilterLabel without filterDefinitions

        return None

    def _setFilter(self, mapping, item, dataId):
        """Set the filter information in an Exposure.

        The Exposure should already have had a filter loaded, but the reader
        (in ``afw``) had to act on incomplete information.  This method
        cross-checks the filter against the data ID and the standard list
        of filters.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the data ID filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not isinstance(
            item, (afwImage.ExposureU, afwImage.ExposureI, afwImage.ExposureF, afwImage.ExposureD)
        ):
            return

        itemFilter = item.getFilter()  # may be None
        try:
            idFilter = mapping.need(["filter"], dataId)["filter"]
        except dafPersist.NoResults:
            idFilter = None

        bestFilter = self._getBestFilter(itemFilter, idFilter)
        if bestFilter is not None:
            if bestFilter != itemFilter:
                item.setFilter(bestFilter)
            # Already using bestFilter, avoid unnecessary edits
        elif itemFilter is None:
            # Old Filter cleanup, without the benefit of FilterDefinition
            if self.filters is not None and idFilter in self.filters:
                idFilter = self.filters[idFilter]

    def _standardizeExposure(
        self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True, setExposureId=False
    ):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter.  In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage.
        dataId : `dict`
            Dataset identifier.
        filter : `bool`
            Set filter?  Ignored if item is already an exposure.
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?
        setExposureId : `bool`
            Should Exposure have its exposure ID filled out from the data ID?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(
                item,
                dataId,
                mapper=self,
                logger=self.log,
                setVisitInfo=setVisitInfo,
                setFilter=filter,
                setExposureId=setExposureId,
            )
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s", item, e)
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if it doesn't already have one and
        # we have either a VisitInfo or exposure metadata.
        # Do not calculate a WCS if this is an amplifier exposure
        if (
            mapping.level.lower() != "amp"
            and exposure.getWcs() is None
            and (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict())
        ):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # See DM-14372 for why this is debug and not warn (e.g. calib
            # files without wcs metadata).
            self.log.debug(
                "wcs set to None; missing information found in metadata to create a valid wcs: %s",
                e.args[0],
            )
        # ensure any WCS values stripped from the metadata are removed in the
        # exposure
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always try to use the metadata first, to strip WCS keys from it.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warning(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warning(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug(
                    "Root-cause Exception was: %s",
                    traceback.TracebackException.from_exception(e.__context__),
                )

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:

        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if "camera" not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy["camera"]
        self.cameraDataLocation = os.path.normpath(os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass,
        )

    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet.

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
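
        Examples
        --------
        The returned `~lsst.daf.base.PropertySet` uses flattened key names,
        so the settings can be inspected with, e.g. (the dataset type and
        recipe contents are illustrative)::

            settings = mapper.getImageCompressionSettings("calexp", dataId)
            settings.getScalar("image.compression.algorithm")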
1445 """
1446 mapping = self.mappings[datasetType]
1447 recipeName = mapping.recipe
1448 storageType = mapping.storage
1449 if storageType not in self._writeRecipes:
1450 return dafBase.PropertySet()
1451 if recipeName not in self._writeRecipes[storageType]:
1452 raise RuntimeError(
1453 "Unrecognized write recipe for datasetType %s (storage type %s): %s"
1454 % (datasetType, storageType, recipeName)
1455 )
1456 recipe = self._writeRecipes[storageType][recipeName].deepCopy()
1457 seed = hash(tuple(dataId.items())) % 2**31
1458 for plane in ("image", "mask", "variance"):
1459 if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
1460 recipe.set(plane + ".scaling.seed", seed)
1461 return recipe

    def _initWriteRecipes(self):
        """Read the recipes for writing files.

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types.  A recipe is referred to by a symbolic name,
        which has associated settings.  These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML.  A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type.  Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to configure
        FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries.  Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0, 8, 16, 32, 64, -32, -64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe::

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {
            "FitsStorage": validateRecipeFitsStorage,
        }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError(
                        "Recipes provided in %s section %s may not override those in %s: %s"
                        % (supplementsFile, entry, recipesFile, intersection)
                    )
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError(
                    "No 'default' recipe defined for storage type %s in %s" % (storageType, recipesFile)
                )
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])


def exposureFromImage(
    image, dataId=None, mapper=None, logger=None, setVisitInfo=True, setFilter=False, setExposureId=False
):
    """Generate an Exposure from an image-like object.

    If the image is a DecoratedImage then also set its metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set).

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        The data ID identifying the visit of the image.
    mapper : `lsst.obs.base.CameraMapper`, optional
        The mapper with which to convert the image.
    logger : `lsst.log.Log`, optional
        An existing logger to which to send output.
    setVisitInfo : `bool`, optional
        If `True`, create and attach a `lsst.afw.image.VisitInfo` to the
        result.  Ignored if ``image`` is an `~lsst.afw.image.Exposure` with
        an existing ``VisitInfo``.
    setFilter : `bool`, optional
        If `True`, create and attach a `lsst.afw.image.FilterLabel` to the
        result.  Converts non-``FilterLabel`` information provided in
        ``image``.  Ignored if ``image`` is an `~lsst.afw.image.Exposure`
        with existing filter information.
    setExposureId : `bool`, optional
        If `True`, create and set an exposure ID from ``dataId``.  Ignored
        if ``image`` is an `~lsst.afw.image.Exposure` with an existing ID.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
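
    Examples
    --------
    A minimal sketch, wrapping a blank in-memory image with no mapper or
    data ID::

        import lsst.afw.image as afwImage

        exposure = exposureFromImage(afwImage.ImageF(64, 64))
        assert isinstance(exposure, afwImage.ExposureF)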
1582 """
1583 translatorClass = None
1584 if mapper is not None:
1585 translatorClass = mapper.translatorClass
1587 metadata = None
1588 if isinstance(image, afwImage.MaskedImage):
1589 exposure = afwImage.makeExposure(image)
1590 elif isinstance(image, afwImage.DecoratedImage):
1591 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1592 metadata = image.getMetadata()
1593 fix_header(metadata, translator_class=translatorClass)
1594 exposure.setMetadata(metadata)
1595 elif isinstance(image, afwImage.Exposure):
1596 exposure = image
1597 metadata = exposure.getMetadata()
1598 fix_header(metadata, translator_class=translatorClass)
1599 else: # Image
1600 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1602 # set exposure ID if we can
1603 if setExposureId and not exposure.info.hasId() and mapper is not None:
1604 try:
1605 exposureId = mapper._computeCcdExposureId(dataId)
1606 exposure.info.id = exposureId
1607 except NotImplementedError:
1608 logger.warning("Could not set exposure ID; mapper does not support it.")
1610 if metadata is not None:
1611 # set filter if we can
1612 if setFilter and mapper is not None and exposure.getFilter() is None:
1613 # Translate whatever was in the metadata
1614 if "FILTER" in metadata:
1615 oldFilter = metadata["FILTER"]
1616 idFilter = dataId["filter"] if "filter" in dataId else None
1617 # oldFilter may not be physical, but _getBestFilter always goes
1618 # through the FilterDefinitions instead of returning
1619 # unvalidated input.
1620 filter = mapper._getBestFilter(afwImage.FilterLabel(physical=oldFilter), idFilter)
1621 if filter is not None:
1622 exposure.setFilter(filter)
1623 # set VisitInfo if we can
1624 if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
1625 if mapper is None:
1626 if not logger:
1627 logger = lsstLog.Log.getLogger("lsst.CameraMapper")
1628 logger.warn("I can only set the VisitInfo if you provide a mapper")
1629 else:
1630 exposureId = mapper._computeCcdExposureId(dataId)
1631 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1633 exposure.getInfo().setVisitInfo(visitInfo)
1635 return exposure


def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage.

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` [`str`, `lsst.daf.base.PropertySet`]
        Validated FitsStorage recipes, indexed by recipe name.

    Raises
    ------
    `RuntimeError`
        If validation fails.
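
    Examples
    --------
    Given a policy with a ``default`` recipe, the result maps each recipe
    name to a `~lsst.daf.base.PropertySet` with every schema default filled
    in, e.g.::

        validated = validateRecipeFitsStorage(recipes)
        validated["default"].getScalar("image.compression.algorithm")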
1659 """
1660 # Schemas define what should be there, and the default values (and by the
1661 # default value, the expected type).
1662 compressionSchema = {
1663 "algorithm": "NONE",
1664 "rows": 1,
1665 "columns": 0,
1666 "quantizeLevel": 0.0,
1667 }
1668 scalingSchema = {
1669 "algorithm": "NONE",
1670 "bitpix": 0,
1671 "maskPlanes": ["NO_DATA"],
1672 "seed": 0,
1673 "quantizeLevel": 4.0,
1674 "quantizePad": 5.0,
1675 "fuzz": True,
1676 "bscale": 1.0,
1677 "bzero": 0.0,
1678 }
1680 def checkUnrecognized(entry, allowed, description):
1681 """Check to see if the entry contains unrecognised keywords"""
1682 unrecognized = set(entry.keys()) - set(allowed)
1683 if unrecognized:
1684 raise RuntimeError(
1685 "Unrecognized entries when parsing image compression recipe %s: %s"
1686 % (description, unrecognized)
1687 )
1689 validated = {}
1690 for name in recipes.names(True):
1691 checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
1692 rr = dafBase.PropertySet()
1693 validated[name] = rr
1694 for plane in ("image", "mask", "variance"):
1695 checkUnrecognized(recipes[name][plane], ["compression", "scaling"], name + "->" + plane)
1697 for settings, schema in (("compression", compressionSchema), ("scaling", scalingSchema)):
1698 prefix = plane + "." + settings
1699 if settings not in recipes[name][plane]:
1700 for key in schema:
1701 rr.set(prefix + "." + key, schema[key])
1702 continue
1703 entry = recipes[name][plane][settings]
1704 checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
1705 for key in schema:
1706 value = type(schema[key])(entry[key]) if key in entry else schema[key]
1707 rr.set(prefix + "." + key, value)
1708 return validated