import copy
import os
import re
import weakref

import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from lsst.utils import getPackageDir
__all__ = ["CameraMapper", "exposureFromImage"]
class CameraMapper(dafPersist.Mapper):

    """CameraMapper is a base class for mappers that handle images from a
    camera and products derived from them.  This provides an abstraction layer
    between the data on disk and the code.

    Public methods: keys, queryMetadata, getDatasetTypes, map,
    canStandardize, standardize

    Mappers for specific data sources (e.g., CFHT Megacam, LSST
    simulations, etc.) should inherit this class.

    The CameraMapper manages datasets within a "root" directory.  Note that
    writing to a dataset present in the input root will hide the existing
    dataset but not overwrite it.  See #2160 for design discussion.

    A camera is assumed to consist of one or more rafts, each composed of
    multiple CCDs.  Each CCD is in turn composed of one or more amplifiers
    (amps).  A camera is also assumed to have a camera geometry description
    (CameraGeom object) as a policy file and a filter description (Filter
    class static configuration) as another policy file.

    Information from the camera geometry and defects are inserted into all
    Exposure objects returned.

    The mapper uses one or two registries to retrieve metadata about the
    images.  The first is a registry of all raw exposures.  This must contain
    the time of the observation.  One or more tables (or the equivalent)
    within the registry are used to look up data identifier components that
    are not specified by the user (e.g. filter) and to return results for
    metadata queries.  The second is an optional registry of all calibration
    data.  This should contain validity start and end entries for each
    calibration dataset in the same timescale as the observation time.

    Subclasses will typically set MakeRawVisitInfoClass:

    MakeRawVisitInfoClass: a class variable that points to a subclass of
    MakeRawVisitInfo, a functor that creates an
    lsst.afw.image.VisitInfo from the FITS metadata of a raw image.

    Subclasses must provide the following methods:

    _extractDetectorName(self, dataId): returns the detector name for a CCD
    (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
    a dataset identifier referring to that CCD or a subcomponent of it.

    _computeCcdExposureId(self, dataId): see below

    _computeCoaddExposureId(self, dataId, singleFilter): see below

    Subclasses may also need to override the following methods:

    _transformId(self, dataId): transformation of a data identifier
    from colloquial usage (e.g., "ccdname") to proper/actual usage
    (e.g., "ccd"), including making it suitable for path expansion
    (e.g. removing commas).  The default implementation does nothing.
    Note that this method should not modify its input parameter.

    getShortCcdName(self, ccdName): a static method that returns a shortened
    name suitable for use as a filename.  The default version converts spaces
    to underscores.

    _mapActualToPath(self, template, actualId): convert a template path to an
    actual path, using the actual dataset identifier.

    The mapper's behaviors are largely specified by the policy file.
    See the MapperDictionary.paf for descriptions of the available items.

    The 'exposures', 'calibrations', and 'datasets' subpolicies configure
    mappings (see Mappings class).

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files.  This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    Notes
    -----
    TODO:

    - Instead of auto-loading the camera at construction time, load it from
      the calibration registry
    """

    packageName = None

    # a class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
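    # Illustrative sketch (hypothetical camera, not a real obs package): a
    # concrete mapper subclass sets packageName and implements the abstract
    # hooks described in the class docstring, e.g.
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #
    #         def _extractDetectorName(self, dataId):
    #             return "CCD%(ccd)d" % dataId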
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)
        self.log = lsstLog.Log.getLogger("CameraMapper")

        if repositoryCfg is not None:
            self.root = repositoryCfg.root
        else:
            self.root = root

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)
        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))

        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
        # If calibRoot is passed in, use that; if not and it's indicated in
        # the policy, use that; otherwise the calibs are in the regular root.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=provided)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # Verify that the class variable packageName has been set by the
        # subclass before this instance is used.
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g.,
        reading the header of an image, retrieving only the size of a catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()
        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=mapper)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # Don't set up any other handling for this dataset type.
                        continue
                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage,
                                      provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)
                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p)
                                for p in location.getLocations()])

                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(location.getLocationsWithRoot()[0]))
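                        # Usage sketch ("raw" is a hypothetical dataset type):
                        # the derived types read small pieces without loading
                        # the pixels, e.g.
                        #     butler.get("raw_filename", dataId)  # list of paths
                        #     butler.get("raw_md", dataId)        # header PropertyList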
                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)
                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           readMetadata(location.getLocationsWithRoot()[0])))
                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]), hdu=1))
                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self),
                                          mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   readMetadata(os.path.join(location.getStorage().root,
                                                             location.getLocations()[0]),
                                                hdu=1).getScalar("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()
    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()
    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'.  The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'.  The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository.  Will return None if
            the object can't be found.  If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """
        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n),
                                                  searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
    def keys(self):
        """Return supported keys.

        Returns
        -------
        iterable
            List of keys usable in a dataset identifier
        """
        return self.keyDict.keys()

    def getKeys(self, datasetType, level):
        """Return a dict of supported keys and their value types for a given
        dataset type at a given level of the key hierarchy.

        Parameters
        ----------
        datasetType : `str`
            Dataset type or None for all dataset types.
        level : `str` or None
            Level or None for all levels or '' for the default level for the
            camera.

        Returns
        -------
        `dict`
            Keys are strings usable in a dataset identifier, values are their
            value types.
        """
        if level == '':
            level = self.getDefaultLevel()

        if datasetType is None:
            keyDict = copy.copy(self.keyDict)
        else:
            keyDict = self.mappings[datasetType].keys()
        if level is not None and level in self.levels:
            keyDict = copy.copy(keyDict)
            for l in self.levels[level]:
                if l in keyDict:
                    del keyDict[l]
        return keyDict
    @classmethod
    def getCameraName(cls):
        """Return the name of the camera that this CameraMapper is for."""
        className = str(cls)
        className = className[className.find('.'):-1]
        m = re.search(r'(\w+)Mapper', className)
        if m is None:
            m = re.search(r"class '[\w.]*?(\w+)'", className)
        name = m.group(1)
        return name[:1].lower() + name[1:] if name else ''

    @classmethod
    def getPackageName(cls):
        """Return the name of the package containing this CameraMapper."""
        if cls.packageName is None:
            raise ValueError('class variable packageName must not be None')
        return cls.packageName
    @classmethod
    def getPackageDir(cls):
        """Return the base directory of this package"""
        return getPackageDir(cls.getPackageName())

    def map_camera(self, dataId, write=False):
        """Map a camera dataset."""
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return dafPersist.ButlerLocation(
            pythonType="lsst.afw.cameraGeom.CameraConfig",
            cppType="Config",
            storageName="ConfigStorage",
            locationList=self.cameraDataLocation or "ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId):
        """Return the (preloaded) camera object.
        """
        if self.camera is None:
            raise RuntimeError("No camera dataset available.")
        return self.camera
    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)
    def std_bfKernel(self, item, dataId):
        """Disable standardization for bfKernel

        bfKernel is a calibration product that is a numpy array, unlike other
        calibration products that are all images; all calibration images are
        sent through _standardizeExposure due to CalibrationMapping, but we
        don't want that to happen to bfKernel.
        """
        return item

    def std_raw(self, item, dataId):
        """Standardize a raw dataset by converting it to an Exposure instead
        of an Image"""
        return self._standardizeExposure(self.exposures['raw'], item, dataId,
                                         trimmed=False, setVisitInfo=True)
    def map_skypolicy(self, dataId):
        """Map a sky policy."""
        return dafPersist.ButlerLocation("lsst.pex.policy.Policy", "Policy",
                                         "Internal", None, None, self,
                                         storage=self.rootStorage)

    def std_skypolicy(self, item, dataId):
        """Standardize a sky policy by returning the one we use."""
        return self.skypolicy
    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : `str`
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : `str`
            Path for registry.
        policy : `lsst.daf.persistence.Policy`
            Policy that contains the registry name, used if path is None.
        policyKey : `str`
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
        elif path is not None and not storage.exists(path):
            self.log.warn("Unable to locate registry at policy path: %s", path)
            path = None

        # Strip a leading repository root from the path, if present
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass
        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        registry = None
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            # Last resort: a registry that scans the filesystem directly
            self.log.info("Loading Posix %s registry from %s", description, storage.root)
            registry = dafPersist.PosixRegistry(storage.root)

        return registry
    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """
        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier.  This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
        """
        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
    @staticmethod
    def getShortCcdName(ccdName):
        """Convert a CCD name to a form useful as a filename

        The default implementation converts spaces to underscores.
        """
        return ccdName.replace(" ", "_")
    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")
    def _extractAmpId(self, dataId):
        """Extract the amplifier identifier from a dataset identifier.

        .. note:: Deprecated in 11_0

        The amplifier identifier has two parts: the detector name for the CCD
        containing the amplifier and the index of the amplifier in the
        detector.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `tuple`
            Amplifier identifier
        """
        trDataId = self._transformId(dataId)
        return (trDataId["ccd"], int(trDataId['amp']))
    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
952 """Set the detector object in an Exposure for a CCD. 956 item : `lsst.afw.image.Exposure` 957 Exposure to set the detector in. 961 Should detector be marked as trimmed? (ignored) 963 if item.getDetector()
is not None:
967 detector = self.
camera[detectorName]
968 item.setDetector(detector)
    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure.  If the Exposure had a
        FILTER keyword, this was already processed during load.  But if it
        didn't, use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        item.setFilter(afwImage.Filter(filterName))
    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry and optionally sets
        the Filter.  In both cases this saves having to persist some data in
        each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter?  Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            item = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                     setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(item, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(item, dataId, trimmed)

        if filter:
            self._setFilter(mapping, item, dataId)

        return item
    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:

        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass,
        )
    def getRegistry(self):
        """Get the registry used by this mapper.

        Returns
        -------
        Registry or None
            The registry used by this mapper for this mapper's repository.
        """
        return self.registry

    def getImageCompressionSettings(self, datasetType, dataId):
        """Stuff image compression settings into a daf.base.PropertySet

        This goes into the ButlerLocation's "additionalData", which gets
        passed into the boost::persistence framework.

        Parameters
        ----------
        datasetType : `str`
            Type of dataset for which to get the image compression settings.
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        additionalData : `lsst.daf.base.PropertySet`
            Image compression settings.
        """
        mapping = self.mappings[datasetType]
        recipeName = mapping.recipe
        storageType = mapping.storage
        if storageType not in self._writeRecipes:
            return dafBase.PropertySet()
        if recipeName not in self._writeRecipes[storageType]:
            raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                               (datasetType, storageType, recipeName))
        recipe = self._writeRecipes[storageType][recipeName].deepCopy()
        seed = hash(tuple(dataId.items())) % 2**31
        for plane in ("image", "mask", "variance"):
            if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
                recipe.set(plane + ".scaling.seed", seed)
        return recipe
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types.  A recipe is referred to by a symbolic name,
        which has associated settings.  These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML.  A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type.  Currently, only the
        ``FitsStorage`` storage type uses recipes, to configure FITS image
        compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries.  Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only additions
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others)
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.debug("wcs set to None; insufficient information found in metadata to create a valid"
                         " wcs: %s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # set VisitInfo if we can
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if logger is None:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
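# Illustrative sketch of using exposureFromImage (defined but never called;
# the image dimensions are arbitrary):
def _exampleExposureFromImage():
    """Minimal usage sketch for exposureFromImage (illustrative only).

    Build an Exposure from a bare MaskedImage; with no mapper supplied,
    no VisitInfo is attached.
    """
    maskedImage = afwImage.MaskedImageF(64, 64)
    return exposureFromImage(maskedImage, setVisitInfo=False)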
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    RuntimeError
        If validation fails.
    """
    # Schemas define what should be there, and the default values (and by the
    # default value, the expected type).
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated