import copy
import os
import re
import traceback
import warnings
import weakref

from astro_metadata_translator import fix_header
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from lsst.utils import getPackageDir
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from ._instrument import Instrument

__all__ = ["CameraMapper", "exposureFromImage"]
52 """CameraMapper is a base class for mappers that handle images from a
53 camera and products derived from them. This provides an abstraction layer
54 between the data on disk and the code.
56 Public methods: keys, queryMetadata, getDatasetTypes, map,
57 canStandardize, standardize
59 Mappers for specific data sources (e.g., CFHT Megacam, LSST
60 simulations, etc.) should inherit this class.
62 The CameraMapper manages datasets within a "root" directory. Note that
63 writing to a dataset present in the input root will hide the existing
64 dataset but not overwrite it. See #2160 for design discussion.
66 A camera is assumed to consist of one or more rafts, each composed of
67 multiple CCDs. Each CCD is in turn composed of one or more amplifiers
68 (amps). A camera is also assumed to have a camera geometry description
69 (CameraGeom object) as a policy file, a filter description (Filter class
70 static configuration) as another policy file.
72 Information from the camera geometry and defects are inserted into all
73 Exposure objects returned.
75 The mapper uses one or two registries to retrieve metadata about the
76 images. The first is a registry of all raw exposures. This must contain
77 the time of the observation. One or more tables (or the equivalent)
78 within the registry are used to look up data identifier components that
79 are not specified by the user (e.g. filter) and to return results for
80 metadata queries. The second is an optional registry of all calibration
81 data. This should contain validity start and end entries for each
82 calibration dataset in the same timescale as the observation time.
84 Subclasses will typically set MakeRawVisitInfoClass and optionally the
85 metadata translator class:
87 MakeRawVisitInfoClass: a class variable that points to a subclass of
88 MakeRawVisitInfo, a functor that creates an
89 lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
91 translatorClass: The `~astro_metadata_translator.MetadataTranslator`
92 class to use for fixing metadata values. If it is not set an attempt
93 will be made to infer the class from ``MakeRawVisitInfoClass``, failing
94 that the metadata fixup will try to infer the translator class from the
97 Subclasses must provide the following methods:
99 _extractDetectorName(self, dataId): returns the detector name for a CCD
100 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
101 a dataset identifier referring to that CCD or a subcomponent of it.
103 _computeCcdExposureId(self, dataId): see below
105 _computeCoaddExposureId(self, dataId, singleFilter): see below
107 Subclasses may also need to override the following methods:
109 _transformId(self, dataId): transformation of a data identifier
110 from colloquial usage (e.g., "ccdname") to proper/actual usage
111 (e.g., "ccd"), including making suitable for path expansion (e.g. removing
112 commas). The default implementation does nothing. Note that this
113 method should not modify its input parameter.
115 getShortCcdName(self, ccdName): a static method that returns a shortened
116 name suitable for use as a filename. The default version converts spaces
119 _mapActualToPath(self, template, actualId): convert a template path to an
120 actual path, using the actual dataset identifier.
122 The mapper's behaviors are largely specified by the policy file.
123 See the MapperDictionary.paf for descriptions of the available items.
125 The 'exposures', 'calibrations', and 'datasets' subpolicies configure
126 mappings (see Mappings class).
128 Common default mappings for all subclasses can be specified in the
129 "policy/{images,exposures,calibrations,datasets}.yaml" files. This
130 provides a simple way to add a product to all camera mappers.
132 Functions to map (provide a path to the data given a dataset
133 identifier dictionary) and standardize (convert data into some standard
134 format or type) may be provided in the subclass as "map_{dataset type}"
135 and "std_{dataset type}", respectively.
137 If non-Exposure datasets cannot be retrieved using standard
138 daf_persistence methods alone, a "bypass_{dataset type}" function may be
139 provided in the subclass to return the dataset instead of using the
140 "datasets" subpolicy.
142 Implementations of map_camera and bypass_camera that should typically be
143 sufficient are provided in this base class.
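
    For illustration only (an editor's sketch; the dataset type ``fooBar``
    is hypothetical, not part of any shipped policy), a subclass could add::

        def map_fooBar(self, dataId, write=False):
            # Return a dafPersist.ButlerLocation describing where the
            # dataset lives and how to read it.
            ...

        def std_fooBar(self, item, dataId):
            # Convert the retrieved object into its standard form.
            return item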

    TODO:

    - Instead of auto-loading the camera at construction time, load it from
      the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """

    # Must be overridden by subclasses; checked at construction time.
    packageName = None

    # A class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # A class or subclass of PupilFactory.
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # The metadata translator class used for fixing headers; None means
    # it will be inferred (see the class docstring).
    translatorClass = None

    # The gen3 Instrument class equivalent to this mapper, if any.
    _gen3instrument = None
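
    # Illustrative sketch (not from the original source): a concrete mapper
    # subclass typically only needs to set the class variables above, e.g.
    # ("obs_mycam" and MyCamTranslator are hypothetical names):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #         MakeRawVisitInfoClass = MakeRawVisitInfo
    #         translatorClass = MyCamTranslator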

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if repositoryCfg is not None:
            self.root = repositoryCfg.root
        else:
            self.root = root

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If calibRoot is passed in, use it; otherwise fall back on the
        # policy, and finally on the regular root.
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
            else:
                calibStorage = None
        if calibStorage is None:
            calibStorage = self.rootStorage

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
        else:
            self.calibRegistry = None

        # Dict of valid keys required to retrieve datasets, and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        self.cameraDataLocation = None  # may be set by _makeCamera
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)
        self.filters = None  # filter translation table, set by subclasses

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings.

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged.
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data.
        provided : `list` of `str`
            Keys provided by the mapper.
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # A composite dataset needs no further standard handling.
                        continue
                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage,
                                      provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)
                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p)
                                for p in location.getLocations()])
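
                    # Illustrative use: butler.get("<datasetType>_filename",
                    # dataId) is served by the bypass function above and
                    # returns the resolved on-disk path(s).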
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)
                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)
                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)
                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw
                                FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)
                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)
                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)
                            def getFilterLabel(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                storedFilter = fitsReader.readFilterLabel()

                                # Apply the same standardization a full
                                # Exposure would get.
                                try:
                                    idFilter = mapping.need(['filter'], dataId)['filter']
                                except dafPersist.NoResults:
                                    idFilter = None
                                bestFilter = self._getBestFilter(storedFilter, idFilter)
                                if bestFilter is not None:
                                    return bestFilter
                                else:
                                    return storedFilter

                            setMethods("filterLabel", bypassImpl=getFilterLabel)
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)
                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)
                    if subPolicy["storage"] == "FitsCatalogStorage":
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)
                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId[key]
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Lengths of catalogs
                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
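
        # Taken together (editor's illustration), the derived dataset types let
        # a butler user retrieve just a piece of a dataset, e.g.:
        #     md = butler.get("raw_md", dataId)    # FITS header only
        #     n = butler.get("src_len", dataId)    # catalog row count only
        # ("raw" and "src" are example dataset types, not guarantees).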

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()
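
    # Illustrative sketch (not from the original source) of how a subclass
    # might implement the ID packing, with hypothetical bit widths:
    #
    #     def _computeCcdExposureId(self, dataId):
    #         dataId = self._transformId(dataId)
    #         return dataId["visit"]*64 + dataId["ccd"]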

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if the
            object can't be found. If the input argument path contained an HDU
            indicator, the returned path will also contain the HDU indicator.
        """
        return self.rootStorage.search(path)
621 """Rename any existing object with the given type and dataId.
623 The CameraMapper implementation saves objects in a sequence of e.g.:
629 All of the backups will be placed in the output repo, however, and will
630 not be removed if they are found elsewhere in the _parent chain. This
631 means that the same file will be stored twice if the previous version
632 was found in an input repo.
641 def firstElement(list):
642 """Get the first element in the list, or None if that can't be
645 return list[0]
if list
is not None and len(list)
else None
648 newLocation = self.map(datasetType, dataId, write=
True)
649 newPath = newLocation.getLocations()[0]
650 path = dafPersist.PosixStorage.search(self.
rootroot, newPath, searchParents=
True)
651 path = firstElement(path)
653 while path
is not None:
655 oldPaths.append((n, path))
656 path = dafPersist.PosixStorage.search(self.
rootroot,
"%s~%d" % (newPath, n), searchParents=
True)
657 path = firstElement(path)
658 for n, oldPath
in reversed(oldPaths):
659 self.
rootStoragerootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
662 """Return supported keys.
667 List of keys usable in a dataset identifier
672 """Return a dict of supported keys and their value types for a given
673 dataset type at a given level of the key hierarchy.
678 Dataset type or None for all dataset types.
679 level : `str` or None
680 Level or None for all levels or '' for the default level for the
686 Keys are strings usable in a dataset identifier, values are their
695 if datasetType
is None:
696 keyDict = copy.copy(self.
keyDictkeyDict)
699 if level
is not None and level
in self.
levelslevels:
700 keyDict = copy.copy(keyDict)
701 for lev
in self.
levelslevels[level]:
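
    # For example (illustrative), getKeys("raw", '') on a typical camera might
    # return {"visit": int, "ccd": int, "filter": str}, minus any keys that
    # belong to the default sub-level.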
716 """Return the name of the camera that this CameraMapper is for."""
718 className = className[className.find(
'.'):-1]
719 m = re.search(
r'(\w+)Mapper', className)
721 m = re.search(
r"class '[\w.]*?(\w+)'", className)
723 return name[:1].lower() + name[1:]
if name
else ''
727 """Return the name of the package containing this CameraMapper."""
729 raise ValueError(
'class variable packageName must not be None')
734 """Return the gen3 Instrument class equivalent for this gen2 Mapper.
739 A `~lsst.obs.base.Instrument` class.
742 raise NotImplementedError(
"Please provide a specific implementation for your instrument"
743 " to enable conversion of this gen2 repository to gen3")
748 raise ValueError(f
"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
749 " but that is not an lsst.obs.base.Instrument")
754 """Return the base directory of this package"""
758 """Map a camera dataset."""
759 if self.
cameracamera
is None:
760 raise RuntimeError(
"No camera dataset available.")
762 return dafPersist.ButlerLocation(
763 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
765 storageName=
"ConfigStorage",
773 """Return the (preloaded) camera object.
775 if self.
cameracamera
is None:
776 raise RuntimeError(
"No camera dataset available.")

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )
791 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
792 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
793 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
797 """Disable standardization for bfKernel
799 bfKernel is a calibration product that is numpy array,
800 unlike other calibration products that are all images;
801 all calibration images are sent through _standardizeExposure
802 due to CalibrationMapping, but we don't want that to happen to bfKernel
807 """Standardize a raw dataset by converting it to an Exposure instead
810 trimmed=
False, setVisitInfo=
True)
813 """Map a sky policy."""
814 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
815 "Internal",
None,
None, self,
819 """Standardize a sky policy by returning the one we use."""
820 return self.skypolicy

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : string
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : Policy
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object.
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)

                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
        elif path is not None and not storage.exists(path):
            self.log.warn("Unable to locate registry at policy path: %s", path)
            path = None
        # Old Butler API indicated the registry relative to the repo root;
        # strip the root prefix if present.
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        registry = None
        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """
        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path.
        actualId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Pathname.
        """
        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
985 """Convert a CCD name to a form useful as a filename
987 The default implementation converts spaces to underscores.
989 return ccdName.replace(
" ",
"_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name.
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    @staticmethod
    def _resolveFilters(definitions, idFilter, filterLabel):
        """Identify the filter(s) consistent with partial filter information.

        Parameters
        ----------
        definitions : `lsst.obs.base.FilterDefinitionCollection`
            The filter definitions in which to search for filters.
        idFilter : `str` or `None`
            The filter information provided in a data ID.
        filterLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter information provided by an exposure; may be incomplete.

        Returns
        -------
        filters : `set` [`lsst.obs.base.FilterDefinition`]
            The set of filters consistent with ``idFilter``
            and ``filterLabel``.
        """
        matches = set(definitions)
        if idFilter is not None:
            matches.intersection_update(definitions.findAll(idFilter))
        if filterLabel is not None and filterLabel.hasPhysicalLabel():
            matches.intersection_update(definitions.findAll(filterLabel.physicalLabel))
        if filterLabel is not None and filterLabel.hasBandLabel():
            matches.intersection_update(definitions.findAll(filterLabel.bandLabel))
        return matches
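
    # Illustrative sketch: with definitions for a physical filter "HSC-G" of
    # band "g" (hypothetical names), _resolveFilters(definitions, "g", None)
    # narrows the full set down to just the definitions matching "g".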

    def _getBestFilter(self, storedLabel, idFilter):
        """Estimate the most complete filter information consistent with the
        data ID and the stored label.

        Parameters
        ----------
        storedLabel : `lsst.afw.image.FilterLabel` or `None`
            The filter previously stored in the file.
        idFilter : `str` or `None`
            The filter implied by the data ID, if any.

        Returns
        -------
        bestFilter : `lsst.afw.image.FilterLabel` or `None`
            The complete filter to describe the dataset. May be equal to
            ``storedLabel``. `None` if no recommendation can be generated.
        """
        try:
            filterDefinitions = self.getGen3Instrument()().filterDefinitions
        except NotImplementedError:
            filterDefinitions = None

        if filterDefinitions is not None:
            definitions = self._resolveFilters(filterDefinitions, idFilter, storedLabel)
            self.log.debug("Matching filters for id=%r and label=%r are %s.",
                           idFilter, storedLabel, definitions)
            if len(definitions) == 1:
                newLabel = list(definitions)[0].makeFilterLabel()
                return newLabel
            elif definitions:
                self.log.warn("Multiple matches for filter %r with data ID %r.", storedLabel, idFilter)
                # A band can still be recommended if all matches agree and
                # nothing better was stored.
                bands = {d.band for d in definitions}
                if len(bands) == 1 and storedLabel is None:
                    band = list(bands)[0]
                    return afwImage.FilterLabel(band=band)
                else:
                    return None
            else:
                self.log.warn("Cannot reconcile filter %r with data ID %r.", storedLabel, idFilter)
                return None
        # Without filter definitions, no recommendation is possible.
        return None

    def _setFilter(self, mapping, item, dataId):
        """Set the filter information in an Exposure.

        The Exposure should already have had a filter loaded, but the reader
        (in ``afw``) had to act on incomplete information. This method
        cross-checks the filter against the data ID and the standard list
        of filters.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the data ID filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        itemFilter = item.getFilterLabel()  # may be None
        try:
            idFilter = mapping.need(['filter'], dataId)['filter']
        except dafPersist.NoResults:
            idFilter = None

        bestFilter = self._getBestFilter(itemFilter, idFilter)
        if bestFilter is not None:
            if bestFilter != itemFilter:
                item.setFilterLabel(bestFilter)
        elif itemFilter is None:
            # Fall back on the old Filter API, remapping the data ID filter
            # name through the translation table if one is available.
            if self.filters is not None and idFilter in self.filters:
                idFilter = self.filters[idFilter]
            try:
                with warnings.catch_warnings():
                    warnings.filterwarnings("ignore", category=FutureWarning)
                    item.setFilter(afwImage.Filter(idFilter))
            except pexExcept.NotFoundError:
                self.log.warn("Filter %s not defined. Set to UNKNOWN.", idFilter)

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier.
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # Only create a WCS if the exposure does not already have one, we
        # have either a VisitInfo or exposure metadata, and this is not an
        # amp-level exposure.
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # Some datasets (e.g. calibs) legitimately lack WCS metadata, so
            # this is logged at debug level rather than warn.
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # Ensure any WCS values stripped from the metadata are removed in the
        # exposure.
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Always create the metadata-based WCS first, so there is a fallback.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass,
        )
1315 """Get the registry used by this mapper.
1320 The registry used by this mapper for this mapper's repository.
1325 """Stuff image compression settings into a daf.base.PropertySet
1327 This goes into the ButlerLocation's "additionalData", which gets
1328 passed into the boost::persistence framework.
1333 Type of dataset for which to get the image compression settings.
1339 additionalData : `lsst.daf.base.PropertySet`
1340 Image compression settings.
1342 mapping = self.
mappingsmappings[datasetType]
1343 recipeName = mapping.recipe
1344 storageType = mapping.storage
1346 return dafBase.PropertySet()
1347 if recipeName
not in self.
_writeRecipes_writeRecipes[storageType]:
1348 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1349 (datasetType, storageType, recipeName))
1350 recipe = self.
_writeRecipes_writeRecipes[storageType][recipeName].deepCopy()
1351 seed = hash(tuple(dataId.items())) % 2**31
1352 for plane
in (
"image",
"mask",
"variance"):
1353 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1354 recipe.set(plane +
".scaling.seed", seed)

    def _initWriteRecipes(self):
        """Read the recipes for writing files.

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, to configure FITS image
        compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1435 """Generate an Exposure from an image-like object
1437 If the image is a DecoratedImage then also set its WCS and metadata
1438 (Image and MaskedImage are missing the necessary metadata
1439 and Exposure already has those set)
1443 image : Image-like object
1444 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1449 `lsst.afw.image.Exposure`
1450 Exposure containing input image.
1452 translatorClass =
None
1453 if mapper
is not None:
1454 translatorClass = mapper.translatorClass
1457 if isinstance(image, afwImage.MaskedImage):
1458 exposure = afwImage.makeExposure(image)
1459 elif isinstance(image, afwImage.DecoratedImage):
1460 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1461 metadata = image.getMetadata()
1462 fix_header(metadata, translator_class=translatorClass)
1463 exposure.setMetadata(metadata)
1464 elif isinstance(image, afwImage.Exposure):
1466 metadata = exposure.getMetadata()
1467 fix_header(metadata, translator_class=translatorClass)
1469 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1472 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1473 if metadata
is not None:
1476 logger = lsstLog.Log.getLogger(
"CameraMapper")
1477 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1479 exposureId = mapper._computeCcdExposureId(dataId)
1480 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1482 exposure.getInfo().setVisitInfo(visitInfo)
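
# Illustrative usage (editor's sketch): a bare Image is wrapped without
# metadata, while a DecoratedImage also carries its header through fix_header:
#
#     exp = exposureFromImage(afwImage.ImageF(10, 10))
#     assert isinstance(exp, afwImage.ExposureF)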
1488 """Validate recipes for FitsStorage
1490 The recipes are supplemented with default values where appropriate.
1492 TODO: replace this custom validation code with Cerberus (DM-11846)
1496 recipes : `lsst.daf.persistence.Policy`
1497 FitsStorage recipes to validate.
1501 validated : `lsst.daf.base.PropertySet`
1502 Validated FitsStorage recipe.
1507 If validation fails.
    # Defaults for the recognized "compression" entries.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    # Defaults for the recognized "scaling" entries.
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated