27 from astro_metadata_translator
import fix_header
28 import lsst.daf.persistence
as dafPersist
29 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base
as dafBase
31 import lsst.afw.geom
as afwGeom
32 import lsst.afw.image
as afwImage
33 import lsst.afw.table
as afwTable
34 from lsst.afw.fits
import readMetadata
35 import lsst.afw.cameraGeom
as afwCameraGeom
36 import lsst.log
as lsstLog
38 from .exposureIdInfo
import ExposureIdInfo
39 from .makeRawVisitInfo
import MakeRawVisitInfo
42 __all__ = [
"CameraMapper",
"exposureFromImage"]
47 """CameraMapper is a base class for mappers that handle images from a 48 camera and products derived from them. This provides an abstraction layer 49 between the data on disk and the code. 51 Public methods: keys, queryMetadata, getDatasetTypes, map, 52 canStandardize, standardize 54 Mappers for specific data sources (e.g., CFHT Megacam, LSST 55 simulations, etc.) should inherit this class. 57 The CameraMapper manages datasets within a "root" directory. Note that 58 writing to a dataset present in the input root will hide the existing 59 dataset but not overwrite it. See #2160 for design discussion. 61 A camera is assumed to consist of one or more rafts, each composed of 62 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 63 (amps). A camera is also assumed to have a camera geometry description 64 (CameraGeom object) as a policy file, a filter description (Filter class 65 static configuration) as another policy file. 67 Information from the camera geometry and defects are inserted into all 68 Exposure objects returned. 70 The mapper uses one or two registries to retrieve metadata about the 71 images. The first is a registry of all raw exposures. This must contain 72 the time of the observation. One or more tables (or the equivalent) 73 within the registry are used to look up data identifier components that 74 are not specified by the user (e.g. filter) and to return results for 75 metadata queries. The second is an optional registry of all calibration 76 data. This should contain validity start and end entries for each 77 calibration dataset in the same timescale as the observation time. 79 Subclasses will typically set MakeRawVisitInfoClass and optionally the 80 metadata translator class: 82 MakeRawVisitInfoClass: a class variable that points to a subclass of 83 MakeRawVisitInfo, a functor that creates an 84 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
86 translatorClass: The `~astro_metadata_translator.MetadataTranslator` 87 class to use for fixing metadata values. If it is not set an attempt 88 will be made to infer the class from ``MakeRawVisitInfoClass``, failing 89 that the metadata fixup will try to infer the translator class from the 92 Subclasses must provide the following methods: 94 _extractDetectorName(self, dataId): returns the detector name for a CCD 95 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 96 a dataset identifier referring to that CCD or a subcomponent of it. 98 _computeCcdExposureId(self, dataId): see below 100 _computeCoaddExposureId(self, dataId, singleFilter): see below 102 Subclasses may also need to override the following methods: 104 _transformId(self, dataId): transformation of a data identifier 105 from colloquial usage (e.g., "ccdname") to proper/actual usage 106 (e.g., "ccd"), including making suitable for path expansion (e.g. removing 107 commas). The default implementation does nothing. Note that this 108 method should not modify its input parameter. 110 getShortCcdName(self, ccdName): a static method that returns a shortened 111 name suitable for use as a filename. The default version converts spaces 114 _mapActualToPath(self, template, actualId): convert a template path to an 115 actual path, using the actual dataset identifier. 117 The mapper's behaviors are largely specified by the policy file. 118 See the MapperDictionary.paf for descriptions of the available items. 120 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 121 mappings (see Mappings class). 123 Common default mappings for all subclasses can be specified in the 124 "policy/{images,exposures,calibrations,datasets}.yaml" files. This 125 provides a simple way to add a product to all camera mappers. 
127 Functions to map (provide a path to the data given a dataset 128 identifier dictionary) and standardize (convert data into some standard 129 format or type) may be provided in the subclass as "map_{dataset type}" 130 and "std_{dataset type}", respectively. 132 If non-Exposure datasets cannot be retrieved using standard 133 daf_persistence methods alone, a "bypass_{dataset type}" function may be 134 provided in the subclass to return the dataset instead of using the 135 "datasets" subpolicy. 137 Implementations of map_camera and bypass_camera that should typically be 138 sufficient are provided in this base class. 144 Instead of auto-loading the camera at construction time, load it from 145 the calibration registry 149 policy : daf_persistence.Policy, 150 Policy with per-camera defaults already merged. 151 repositoryDir : string 152 Policy repository for the subclassing module (obtained with 153 getRepositoryPath() on the per-camera default dictionary). 154 root : string, optional 155 Path to the root directory for data. 156 registry : string, optional 157 Path to registry with data's metadata. 158 calibRoot : string, optional 159 Root directory for calibrations. 160 calibRegistry : string, optional 161 Path to registry with calibrations' metadata. 162 provided : list of string, optional 163 Keys provided by the mapper. 164 parentRegistry : Registry subclass, optional 165 Registry from a parent repository that may be used to look up 167 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 168 The configuration information for the repository this mapper is 175 MakeRawVisitInfoClass = MakeRawVisitInfo
178 PupilFactoryClass = afwCameraGeom.PupilFactory
181 translatorClass =
None 183 def __init__(self, policy, repositoryDir,
184 root=None, registry=None, calibRoot=None, calibRegistry=None,
185 provided=None, parentRegistry=None, repositoryCfg=None):
187 dafPersist.Mapper.__init__(self)
189 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
194 self.
root = repositoryCfg.root
198 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 199 if repoPolicy
is not None:
200 policy.update(repoPolicy)
204 if 'levels' in policy:
205 levelsPolicy = policy[
'levels']
206 for key
in levelsPolicy.names(
True):
207 self.
levels[key] = set(levelsPolicy.asArray(key))
210 if 'defaultSubLevels' in policy:
216 root = dafPersist.LogicalLocation(root).locString()
226 if calibRoot
is not None:
227 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
228 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
231 calibRoot = policy.get(
'calibRoot',
None)
233 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
235 if calibStorage
is None:
243 posixIfNoSql=(
not parentRegistry))
246 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
247 if needCalibRegistry:
250 "calibRegistryPath", calibStorage,
254 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
255 "calibRoot ivar:%s or policy['calibRoot']:%s" %
256 (calibRoot, policy.get(
'calibRoot',
None)))
276 raise ValueError(
'class variable packageName must not be None')
286 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
287 """Initialize mappings 289 For each of the dataset types that we want to be able to read, there 290 are methods that can be created to support them: 291 * map_<dataset> : determine the path for dataset 292 * std_<dataset> : standardize the retrieved dataset 293 * bypass_<dataset> : retrieve the dataset (bypassing the usual 295 * query_<dataset> : query the registry 297 Besides the dataset types explicitly listed in the policy, we create 298 additional, derived datasets for additional conveniences, 299 e.g., reading the header of an image, retrieving only the size of a 304 policy : `lsst.daf.persistence.Policy` 305 Policy with per-camera defaults already merged 306 rootStorage : `Storage subclass instance` 307 Interface to persisted repository data. 308 calibRoot : `Storage subclass instance` 309 Interface to persisted calib repository data 310 provided : `list` of `str` 311 Keys provided by the mapper 314 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
315 "obs_base",
"ImageMappingDefaults.yaml",
"policy"))
316 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
317 "obs_base",
"ExposureMappingDefaults.yaml",
"policy"))
318 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
319 "obs_base",
"CalibrationMappingDefaults.yaml",
"policy"))
320 dsMappingPolicy = dafPersist.Policy()
324 (
"images", imgMappingPolicy, ImageMapping),
325 (
"exposures", expMappingPolicy, ExposureMapping),
326 (
"calibrations", calMappingPolicy, CalibrationMapping),
327 (
"datasets", dsMappingPolicy, DatasetMapping)
330 for name, defPolicy, cls
in mappingList:
332 datasets = policy[name]
335 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
336 if os.path.exists(defaultsPath):
337 datasets.merge(dafPersist.Policy(defaultsPath))
340 setattr(self, name, mappings)
341 for datasetType
in datasets.names(
True):
342 subPolicy = datasets[datasetType]
343 subPolicy.merge(defPolicy)
345 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
346 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
347 subPolicy=subPolicy):
348 components = subPolicy.get(
'composite')
349 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 350 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 351 python = subPolicy[
'python']
352 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
353 disassembler=disassembler,
357 for name, component
in components.items():
358 butlerComposite.add(id=name,
359 datasetType=component.get(
'datasetType'),
360 setter=component.get(
'setter',
None),
361 getter=component.get(
'getter',
None),
362 subset=component.get(
'subset',
False),
363 inputOnly=component.get(
'inputOnly',
False))
364 return butlerComposite
365 setattr(self,
"map_" + datasetType, compositeClosure)
369 if name ==
"calibrations":
371 provided=provided, dataRoot=rootStorage)
373 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
376 raise ValueError(f
"Duplicate mapping policy for dataset type {datasetType}")
377 self.
keyDict.update(mapping.keys())
378 mappings[datasetType] = mapping
379 self.
mappings[datasetType] = mapping
380 if not hasattr(self,
"map_" + datasetType):
def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
    """Map `dataId` to a butler location via the captured mapping.

    ``mapper`` is a weak proxy to avoid a reference cycle between the
    mapper and the bound closure; ``mapping`` is bound at definition time.
    """
    location = mapping.map(mapper, dataId, write)
    return location
383 setattr(self,
"map_" + datasetType, mapClosure)
384 if not hasattr(self,
"query_" + datasetType):
def queryClosure(format, dataId, mapping=mapping):
    """Look up ``format`` fields for ``dataId`` in the captured mapping's registry."""
    results = mapping.lookup(format, dataId)
    return results
387 setattr(self,
"query_" + datasetType, queryClosure)
388 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
    """Standardize a retrieved dataset using the captured mapping.

    ``mapper`` is a weak proxy to avoid a mapper<->closure reference cycle.
    """
    standardized = mapping.standardize(mapper, item, dataId)
    return standardized
391 setattr(self,
"std_" + datasetType, stdClosure)
def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
    """Set convenience methods on CameraMapper.

    Registers ``map_<datasetType>_<suffix>``,
    ``bypass_<datasetType>_<suffix>`` and ``query_<datasetType>_<suffix>``
    attributes on ``self`` (``self`` and ``datasetType`` are captured from
    the enclosing scope), never overwriting an attribute that already
    exists.

    Parameters
    ----------
    suffix : `str`
        Component name appended to the dataset type in the method names.
    mapImpl : callable, optional
        Implementation for the map method; defaults to the dataset
        type's own ``map_`` method.
    bypassImpl : callable, optional
        Implementation for the bypass method; if `None`, falls back to
        an existing ``bypass_<datasetType>`` method when one is present.
    queryImpl : callable, optional
        Implementation for the query method; defaults to the dataset
        type's own ``query_`` method.
    """
    mapName = "map_" + datasetType + "_" + suffix
    bypassName = "bypass_" + datasetType + "_" + suffix
    queryName = "query_" + datasetType + "_" + suffix
    if not hasattr(self, mapName):
        setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
    if not hasattr(self, bypassName):
        # Fall back to a dataset-level bypass method if one exists.
        if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
            bypassImpl = getattr(self, "bypass_" + datasetType)
        # Unlike map/query there is no generic default for bypass, so only
        # set the attribute when an implementation is actually available.
        if bypassImpl is not None:
            setattr(self, bypassName, bypassImpl)
    if not hasattr(self, queryName):
        setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
409 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
410 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
412 if subPolicy[
"storage"] ==
"FitsStorage":
413 def getMetadata(datasetType, pythonType, location, dataId):
414 md = readMetadata(location.getLocationsWithRoot()[0])
418 setMethods(
"md", bypassImpl=getMetadata)
421 addName =
"add_" + datasetType
422 if not hasattr(self, addName):
425 if name ==
"exposures":
def getSkyWcs(datasetType, pythonType, location, dataId):
    """Read only the SkyWcs component from an Exposure FITS file."""
    path = location.getLocationsWithRoot()[0]
    return afwImage.ExposureFitsReader(path).readWcs()
430 setMethods(
"wcs", bypassImpl=getSkyWcs)
def getPhotoCalib(datasetType, pythonType, location, dataId):
    """Read only the PhotoCalib component from an Exposure FITS file."""
    path = location.getLocationsWithRoot()[0]
    return afwImage.ExposureFitsReader(path).readPhotoCalib()
436 setMethods(
"photoCalib", bypassImpl=getPhotoCalib)
def getVisitInfo(datasetType, pythonType, location, dataId):
    """Read only the VisitInfo component from an Exposure FITS file."""
    path = location.getLocationsWithRoot()[0]
    return afwImage.ExposureFitsReader(path).readVisitInfo()
442 setMethods(
"visitInfo", bypassImpl=getVisitInfo)
def getFilter(datasetType, pythonType, location, dataId):
    """Read only the Filter component from an Exposure FITS file."""
    path = location.getLocationsWithRoot()[0]
    return afwImage.ExposureFitsReader(path).readFilter()
448 setMethods(
"filter", bypassImpl=getFilter)
450 setMethods(
"detector",
451 mapImpl=
lambda dataId, write=
False:
452 dafPersist.ButlerLocation(
453 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
455 storageName=
"Internal",
456 locationList=
"ignored",
461 bypassImpl=
lambda datasetType, pythonType, location, dataId:
def getBBox(datasetType, pythonType, location, dataId):
    """Derive the image bounding box from FITS header metadata (HDU 1)."""
    header = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
    return afwImage.bboxFromMetadata(header)
470 setMethods(
"bbox", bypassImpl=getBBox)
472 elif name ==
"images":
def getBBox(datasetType, pythonType, location, dataId):
    """Derive the image bounding box from the primary FITS header metadata."""
    header = readMetadata(location.getLocationsWithRoot()[0])
    return afwImage.bboxFromMetadata(header)
477 setMethods(
"bbox", bypassImpl=getBBox)
479 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
481 def getMetadata(datasetType, pythonType, location, dataId):
482 md = readMetadata(os.path.join(location.getStorage().root,
483 location.getLocations()[0]), hdu=1)
487 setMethods(
"md", bypassImpl=getMetadata)
490 if subPolicy[
"storage"] ==
"FitsStorage":
491 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
492 subId = dataId.copy()
494 loc = mapping.map(mapper, subId, write)
495 bbox = dataId[
'bbox']
496 llcX = bbox.getMinX()
497 llcY = bbox.getMinY()
498 width = bbox.getWidth()
499 height = bbox.getHeight()
500 loc.additionalData.set(
'llcX', llcX)
501 loc.additionalData.set(
'llcY', llcY)
502 loc.additionalData.set(
'width', width)
503 loc.additionalData.set(
'height', height)
504 if 'imageOrigin' in dataId:
505 loc.additionalData.set(
'imageOrigin',
506 dataId[
'imageOrigin'])
def querySubClosure(key, format, dataId, mapping=mapping):
    """Query the registry for a sub-image dataset.

    Works on a copy of ``dataId`` so the caller's dict is not modified.
    """
    subId = dataId.copy()
    # NOTE(review): the original source appears to remove the sub-image
    # entry (``key``/'bbox') from ``subId`` before the lookup — that
    # statement is missing from this view; confirm against the full source.
    return mapping.lookup(format, subId)
513 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
515 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
518 def getLen(datasetType, pythonType, location, dataId):
519 md = readMetadata(os.path.join(location.getStorage().root,
520 location.getLocations()[0]), hdu=1)
524 setMethods(
"len", bypassImpl=getLen)
527 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
528 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
529 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
530 location.getLocations()[0])))
def _computeCcdExposureId(self, dataId):
    """Compute the 64-bit (long) identifier for a CCD exposure.

    Subclasses must override.

    Parameters
    ----------
    dataId : `dict`
        Data identifier with visit, ccd.

    Raises
    ------
    NotImplementedError
        Always; subclasses must provide the implementation.
    """
    raise NotImplementedError()
def _computeCoaddExposureId(self, dataId, singleFilter):
    """Compute the 64-bit (long) identifier for a coadd.

    Subclasses must override.

    Parameters
    ----------
    dataId : `dict`
        Data identifier with tract and patch.
    singleFilter : `bool`
        True means the desired ID is for a single-filter coadd, in
        which case ``dataId`` must contain filter.

    Raises
    ------
    NotImplementedError
        Always; subclasses must provide the implementation.
    """
    raise NotImplementedError()
559 def _search(self, path):
560 """Search for path in the associated repository's storage. 565 Path that describes an object in the repository associated with 567 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 568 indicator will be stripped when searching and so will match 569 filenames without the HDU indicator, e.g. 'foo.fits'. The path 570 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 575 The path for this object in the repository. Will return None if the 576 object can't be found. If the input argument path contained an HDU 577 indicator, the returned path will also contain the HDU indicator. 582 """Rename any existing object with the given type and dataId. 584 The CameraMapper implementation saves objects in a sequence of e.g.: 590 All of the backups will be placed in the output repo, however, and will 591 not be removed if they are found elsewhere in the _parent chain. This 592 means that the same file will be stored twice if the previous version 593 was found in an input repo. 602 def firstElement(list):
603 """Get the first element in the list, or None if that can't be 606 return list[0]
if list
is not None and len(list)
else None 609 newLocation = self.map(datasetType, dataId, write=
True)
610 newPath = newLocation.getLocations()[0]
611 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
612 path = firstElement(path)
614 while path
is not None:
616 oldPaths.append((n, path))
617 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
618 path = firstElement(path)
619 for n, oldPath
in reversed(oldPaths):
620 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
623 """Return supported keys. 628 List of keys usable in a dataset identifier 633 """Return a dict of supported keys and their value types for a given 634 dataset type at a given level of the key hierarchy. 639 Dataset type or None for all dataset types. 640 level : `str` or None 641 Level or None for all levels or '' for the default level for the 647 Keys are strings usable in a dataset identifier, values are their 655 if datasetType
is None:
656 keyDict = copy.copy(self.
keyDict)
659 if level
is not None and level
in self.
levels:
660 keyDict = copy.copy(keyDict)
661 for l
in self.
levels[level]:
676 """Return the name of the camera that this CameraMapper is for.""" 678 className = className[className.find(
'.'):-1]
679 m = re.search(
r'(\w+)Mapper', className)
681 m = re.search(
r"class '[\w.]*?(\w+)'", className)
683 return name[:1].lower() + name[1:]
if name
else '' 687 """Return the name of the package containing this CameraMapper.""" 689 raise ValueError(
'class variable packageName must not be None')
694 """Return the base directory of this package""" 698 """Map a camera dataset.""" 700 raise RuntimeError(
"No camera dataset available.")
702 return dafPersist.ButlerLocation(
703 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
705 storageName=
"ConfigStorage",
713 """Return the (preloaded) camera object. 716 raise RuntimeError(
"No camera dataset available.")
720 return dafPersist.ButlerLocation(
721 pythonType=
"lsst.obs.base.ExposureIdInfo",
723 storageName=
"Internal",
724 locationList=
"ignored",
731 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 732 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
733 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
737 """Disable standardization for bfKernel 739 bfKernel is a calibration product that is numpy array, 740 unlike other calibration products that are all images; 741 all calibration images are sent through _standardizeExposure 742 due to CalibrationMapping, but we don't want that to happen to bfKernel 747 """Standardize a raw dataset by converting it to an Exposure instead 750 trimmed=
False, setVisitInfo=
True)
753 """Map a sky policy.""" 754 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
755 "Internal",
None,
None, self,
def std_skypolicy(self, item, dataId):
    """Standardize a sky policy by returning the one we use.

    Parameters
    ----------
    item : `object`
        Retrieved object; ignored — the mapper's own policy is returned.
    dataId : `dict`
        Dataset identifier; ignored.

    Returns
    -------
    `lsst.pex.policy.Policy`
        The mapper's ``skypolicy`` attribute.
    """
    # The def line was lost in extraction; signature restored from the
    # module's method index (std_skypolicy(self, item, dataId)).
    return self.skypolicy
768 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
770 """Set up a registry (usually SQLite3), trying a number of possible 778 Description of registry (for log messages) 782 Policy that contains the registry name, used if path is None. 784 Key in policy for registry path. 785 storage : Storage subclass 786 Repository Storage to look in. 787 searchParents : bool, optional 788 True if the search for a registry should follow any Butler v1 790 posixIfNoSql : bool, optional 791 If an sqlite registry is not found, will create a posix registry if 796 lsst.daf.persistence.Registry 799 if path
is None and policyKey
in policy:
800 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
801 if os.path.isabs(path):
802 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
803 if not storage.exists(path):
804 newPath = storage.instanceSearch(path)
806 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 808 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
812 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
820 if path
and (path.startswith(root)):
821 path = path[len(root +
'/'):]
822 except AttributeError:
828 def search(filename, description):
829 """Search for file in storage 834 Filename to search for 836 Description of file, for error message. 840 path : `str` or `None` 841 Path to file, or None 843 result = storage.instanceSearch(filename)
846 self.
log.debug(
"Unable to locate %s: %s", description, filename)
851 path = search(
"%s.pgsql" % name,
"%s in root" % description)
853 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
855 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
858 if not storage.exists(path):
859 newPath = storage.instanceSearch(path)
860 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 861 if newPath
is not None:
863 localFileObj = storage.getLocalFile(path)
864 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
865 registry = dafPersist.Registry.create(localFileObj.name)
867 elif not registry
and posixIfNoSql:
869 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
870 registry = dafPersist.PosixRegistry(storage.root)
876 def _transformId(self, dataId):
877 """Generate a standard ID dict from a camera-specific ID dict. 879 Canonical keys include: 880 - amp: amplifier name 881 - ccd: CCD name (in LSST this is a combination of raft and sensor) 882 The default implementation returns a copy of its input. 887 Dataset identifier; this must not be modified 892 Transformed dataset identifier. 897 def _mapActualToPath(self, template, actualId):
898 """Convert a template path to an actual path, using the actual data 899 identifier. This implementation is usually sufficient but can be 900 overridden by the subclass. 917 return template % transformedId
918 except Exception
as e:
919 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
923 """Convert a CCD name to a form useful as a filename 925 The default implementation converts spaces to underscores. 927 return ccdName.replace(
" ",
"_")
def _extractDetectorName(self, dataId):
    """Extract the detector (CCD) name from the dataset identifier.

    The name in question is the detector name used by lsst.afw.cameraGeom.

    Parameters
    ----------
    dataId : `dict`
        Dataset identifier.

    Raises
    ------
    NotImplementedError
        Always; subclasses must provide the implementation.
    """
    raise NotImplementedError("No _extractDetectorName() function specified")
def _extractAmpId(self, dataId):
    """Extract the amplifier identifier from a dataset identifier.

    .. note:: Deprecated in 11_0

    The amplifier identifier has two parts: the detector name for the
    CCD containing the amplifier and index of the amplifier in the
    detector.

    Parameters
    ----------
    dataId : `dict`
        Dataset identifier.

    Returns
    -------
    `tuple` of (`str`, `int`)
        Detector name and amplifier index.
    """
    # NOTE(review): ``trDataId`` is not defined in this view; presumably
    # ``trDataId = self._transformId(dataId)`` precedes this line —
    # confirm against the full source.
    return (trDataId["ccd"], int(trDataId['amp']))
968 def _setAmpDetector(self, item, dataId, trimmed=True):
969 """Set the detector object in an Exposure for an amplifier. 971 Defects are also added to the Exposure based on the detector object. 975 item : `lsst.afw.image.Exposure` 976 Exposure to set the detector in. 980 Should detector be marked as trimmed? (ignored) 985 def _setCcdDetector(self, item, dataId, trimmed=True):
986 """Set the detector object in an Exposure for a CCD. 990 item : `lsst.afw.image.Exposure` 991 Exposure to set the detector in. 995 Should detector be marked as trimmed? (ignored) 997 if item.getDetector()
is not None:
1001 detector = self.
camera[detectorName]
1002 item.setDetector(detector)
1004 def _setFilter(self, mapping, item, dataId):
1005 """Set the filter object in an Exposure. If the Exposure had a FILTER 1006 keyword, this was already processed during load. But if it didn't, 1007 use the filter from the registry. 1011 mapping : `lsst.obs.base.Mapping` 1012 Where to get the filter from. 1013 item : `lsst.afw.image.Exposure` 1014 Exposure to set the filter in. 1019 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1020 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1023 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1026 actualId = mapping.need([
'filter'], dataId)
1027 filterName = actualId[
'filter']
1029 filterName = self.
filters[filterName]
1030 item.setFilter(afwImage.Filter(filterName))
1033 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1034 trimmed=True, setVisitInfo=True):
1035 """Default standardization function for images. 1037 This sets the Detector from the camera geometry 1038 and optionally set the Fiter. In both cases this saves 1039 having to persist some data in each exposure (or image). 1043 mapping : `lsst.obs.base.Mapping` 1044 Where to get the values from. 1045 item : image-like object 1046 Can be any of lsst.afw.image.Exposure, 1047 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1048 or lsst.afw.image.MaskedImage 1053 Set filter? Ignored if item is already an exposure 1055 Should detector be marked as trimmed? 1056 setVisitInfo : `bool` 1057 Should Exposure have its VisitInfo filled out from the metadata? 1061 `lsst.afw.image.Exposure` 1062 The standardized Exposure. 1065 item =
exposureFromImage(item, dataId, mapper=self, logger=self.
log, setVisitInfo=setVisitInfo)
1066 except Exception
as e:
1067 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1070 if mapping.level.lower() ==
"amp":
1072 elif mapping.level.lower() ==
"ccd":
1080 def _makeCamera(self, policy, repositoryDir):
1081 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 1084 Also set self.cameraDataLocation, if relevant (else it can be left 1087 This implementation assumes that policy contains an entry "camera" 1088 that points to the subdirectory in this package of camera data; 1089 specifically, that subdirectory must contain: 1090 - a file named `camera.py` that contains persisted camera config 1091 - ampInfo table FITS files, as required by 1092 lsst.afw.cameraGeom.makeCameraFromPath 1096 policy : `lsst.daf.persistence.Policy` 1097 Policy with per-camera defaults already merged 1098 (PexPolicy only for backward compatibility). 1099 repositoryDir : `str` 1100 Policy repository for the subclassing module (obtained with 1101 getRepositoryPath() on the per-camera default dictionary). 1103 if 'camera' not in policy:
1104 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1105 cameraDataSubdir = policy[
'camera']
1107 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1108 cameraConfig = afwCameraGeom.CameraConfig()
1111 return afwCameraGeom.makeCameraFromPath(
1112 cameraConfig=cameraConfig,
1113 ampInfoPath=ampInfoPath,
1119 """Get the registry used by this mapper. 1124 The registry used by this mapper for this mapper's repository. 1129 """Stuff image compression settings into a daf.base.PropertySet 1131 This goes into the ButlerLocation's "additionalData", which gets 1132 passed into the boost::persistence framework. 1137 Type of dataset for which to get the image compression settings. 1143 additionalData : `lsst.daf.base.PropertySet` 1144 Image compression settings. 1146 mapping = self.
mappings[datasetType]
1147 recipeName = mapping.recipe
1148 storageType = mapping.storage
1150 return dafBase.PropertySet()
1152 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1153 (datasetType, storageType, recipeName))
1154 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1155 seed = hash(tuple(dataId.items())) % 2**31
1156 for plane
in (
"image",
"mask",
"variance"):
1157 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1158 recipe.set(plane +
".scaling.seed", seed)
1161 def _initWriteRecipes(self):
1162 """Read the recipes for writing files 1164 These recipes are currently used for configuring FITS compression, 1165 but they could have wider uses for configuring different flavors 1166 of the storage types. A recipe is referred to by a symbolic name, 1167 which has associated settings. These settings are stored as a 1168 `PropertySet` so they can easily be passed down to the 1169 boost::persistence framework as the "additionalData" parameter. 1171 The list of recipes is written in YAML. A default recipe and 1172 some other convenient recipes are in obs_base/policy/writeRecipes.yaml 1173 and these may be overridden or supplemented by the individual obs_* 1174 packages' own policy/writeRecipes.yaml files. 1176 Recipes are grouped by the storage type. Currently, only the 1177 ``FitsStorage`` storage type uses recipes, which uses it to 1178 configure FITS image compression. 1180 Each ``FitsStorage`` recipe for FITS compression should define 1181 "image", "mask" and "variance" entries, each of which may contain 1182 "compression" and "scaling" entries. Defaults will be provided for 1183 any missing elements under "compression" and "scaling". 1185 The allowed entries under "compression" are: 1187 * algorithm (string): compression algorithm to use 1188 * rows (int): number of rows per tile (0 = entire dimension) 1189 * columns (int): number of columns per tile (0 = entire dimension) 1190 * quantizeLevel (float): cfitsio quantization level 1192 The allowed entries under "scaling" are: 1194 * algorithm (string): scaling algorithm to use 1195 * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64) 1196 * fuzz (bool): fuzz the values when quantising floating-point values? 
1197 * seed (long): seed for random number generator when fuzzing 1198 * maskPlanes (list of string): mask planes to ignore when doing 1200 * quantizeLevel: divisor of the standard deviation for STDEV_* scaling 1201 * quantizePad: number of stdev to allow on the low side (for 1202 STDEV_POSITIVE/NEGATIVE) 1203 * bscale: manually specified BSCALE (for MANUAL scaling) 1204 * bzero: manually specified BSCALE (for MANUAL scaling) 1206 A very simple example YAML recipe: 1212 algorithm: GZIP_SHUFFLE 1216 recipesFile = os.path.join(
getPackageDir(
"obs_base"),
"policy",
"writeRecipes.yaml")
1217 recipes = dafPersist.Policy(recipesFile)
1218 supplementsFile = os.path.join(self.
getPackageDir(),
"policy",
"writeRecipes.yaml")
1219 validationMenu = {
'FitsStorage': validateRecipeFitsStorage, }
1220 if os.path.exists(supplementsFile)
and supplementsFile != recipesFile:
1221 supplements = dafPersist.Policy(supplementsFile)
1223 for entry
in validationMenu:
1224 intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1226 raise RuntimeError(
"Recipes provided in %s section %s may not override those in %s: %s" %
1227 (supplementsFile, entry, recipesFile, intersection))
1228 recipes.update(supplements)
1231 for storageType
in recipes.names(
True):
1232 if "default" not in recipes[storageType]:
1233 raise RuntimeError(
"No 'default' recipe defined for storage type %s in %s" %
1234 (storageType, recipesFile))
1235 self.
_writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1239 """Generate an Exposure from an image-like object 1241 If the image is a DecoratedImage then also set its WCS and metadata 1242 (Image and MaskedImage are missing the necessary metadata 1243 and Exposure already has those set) 1247 image : Image-like object 1248 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or 1253 `lsst.afw.image.Exposure` 1254 Exposure containing input image. 1257 if isinstance(image, afwImage.MaskedImage):
1258 exposure = afwImage.makeExposure(image)
1259 elif isinstance(image, afwImage.DecoratedImage):
1260 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1261 metadata = image.getMetadata()
1263 wcs = afwGeom.makeSkyWcs(metadata, strip=
True)
1264 exposure.setWcs(wcs)
1265 except pexExcept.TypeError
as e:
1268 logger = lsstLog.Log.getLogger(
"CameraMapper")
1269 logger.debug(
"wcs set to None; insufficient information found in metadata to create a valid wcs:" 1272 exposure.setMetadata(metadata)
1273 elif isinstance(image, afwImage.Exposure):
1276 metadata = exposure.getMetadata()
1279 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1283 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1284 if metadata
is not None:
1287 logger = lsstLog.Log.getLogger(
"CameraMapper")
1288 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1290 exposureId = mapper._computeCcdExposureId(dataId)
1291 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1293 exposure.getInfo().setVisitInfo(visitInfo)
1299 """Validate recipes for FitsStorage 1301 The recipes are supplemented with default values where appropriate. 1303 TODO: replace this custom validation code with Cerberus (DM-11846) 1307 recipes : `lsst.daf.persistence.Policy` 1308 FitsStorage recipes to validate. 1312 validated : `lsst.daf.base.PropertySet` 1313 Validated FitsStorage recipe. 1318 If validation fails. 1322 compressionSchema = {
1323 "algorithm":
"NONE",
1326 "quantizeLevel": 0.0,
1329 "algorithm":
"NONE",
1331 "maskPlanes": [
"NO_DATA"],
1333 "quantizeLevel": 4.0,
def checkUnrecognized(entry, allowed, description):
    """Check that ``entry`` contains no unrecognised keywords.

    Parameters
    ----------
    entry : mapping
        Recipe section to validate; must support ``keys()``.
    allowed : iterable of `str`
        Keyword names permitted in ``entry``.
    description : `str`
        Human-readable label for the recipe section, used in the error
        message.

    Raises
    ------
    RuntimeError
        If ``entry`` contains any keywords not listed in ``allowed``.
    """
    unrecognized = set(entry.keys()) - set(allowed)
    # The visible code computed the set and the message but never raised;
    # restore the guard so invalid recipes are actually reported.
    if unrecognized:
        raise RuntimeError(
            "Unrecognized entries when parsing image compression recipe %s: %s" %
            (description, unrecognized))
1349 for name
in recipes.names(
True):
1350 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1351 rr = dafBase.PropertySet()
1352 validated[name] = rr
1353 for plane
in (
"image",
"mask",
"variance"):
1354 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1355 name +
"->" + plane)
1357 for settings, schema
in ((
"compression", compressionSchema),
1358 (
"scaling", scalingSchema)):
1359 prefix = plane +
"." + settings
1360 if settings
not in recipes[name][plane]:
1362 rr.set(prefix +
"." + key, schema[key])
1364 entry = recipes[name][plane][settings]
1365 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1367 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1368 rr.set(prefix +
"." + key, value)
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
Utility functions.
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)