28 from deprecated.sphinx
import deprecated
30 from astro_metadata_translator
import fix_header
31 import lsst.daf.persistence
as dafPersist
32 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
33 import lsst.daf.base
as dafBase
34 import lsst.afw.geom
as afwGeom
35 import lsst.afw.image
as afwImage
36 import lsst.afw.table
as afwTable
37 from lsst.afw.fits
import readMetadata
38 import lsst.afw.cameraGeom
as afwCameraGeom
39 import lsst.log
as lsstLog
41 from .exposureIdInfo
import ExposureIdInfo
42 from .makeRawVisitInfo
import MakeRawVisitInfo
43 from .utils
import createInitialSkyWcs, InitialSkyWcsError
# Public API of this module: the mapper base class and the module-level
# helper for building an Exposure from a bare image.
__all__ = ["CameraMapper", "exposureFromImage"]
51 """CameraMapper is a base class for mappers that handle images from a 52 camera and products derived from them. This provides an abstraction layer 53 between the data on disk and the code. 55 Public methods: keys, queryMetadata, getDatasetTypes, map, 56 canStandardize, standardize 58 Mappers for specific data sources (e.g., CFHT Megacam, LSST 59 simulations, etc.) should inherit this class. 61 The CameraMapper manages datasets within a "root" directory. Note that 62 writing to a dataset present in the input root will hide the existing 63 dataset but not overwrite it. See #2160 for design discussion. 65 A camera is assumed to consist of one or more rafts, each composed of 66 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 67 (amps). A camera is also assumed to have a camera geometry description 68 (CameraGeom object) as a policy file, a filter description (Filter class 69 static configuration) as another policy file. 71 Information from the camera geometry and defects are inserted into all 72 Exposure objects returned. 74 The mapper uses one or two registries to retrieve metadata about the 75 images. The first is a registry of all raw exposures. This must contain 76 the time of the observation. One or more tables (or the equivalent) 77 within the registry are used to look up data identifier components that 78 are not specified by the user (e.g. filter) and to return results for 79 metadata queries. The second is an optional registry of all calibration 80 data. This should contain validity start and end entries for each 81 calibration dataset in the same timescale as the observation time. 83 Subclasses will typically set MakeRawVisitInfoClass and optionally the 84 metadata translator class: 86 MakeRawVisitInfoClass: a class variable that points to a subclass of 87 MakeRawVisitInfo, a functor that creates an 88 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
90 translatorClass: The `~astro_metadata_translator.MetadataTranslator` 91 class to use for fixing metadata values. If it is not set an attempt 92 will be made to infer the class from ``MakeRawVisitInfoClass``, failing 93 that the metadata fixup will try to infer the translator class from the 96 Subclasses must provide the following methods: 98 _extractDetectorName(self, dataId): returns the detector name for a CCD 99 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 100 a dataset identifier referring to that CCD or a subcomponent of it. 102 _computeCcdExposureId(self, dataId): see below 104 _computeCoaddExposureId(self, dataId, singleFilter): see below 106 Subclasses may also need to override the following methods: 108 _transformId(self, dataId): transformation of a data identifier 109 from colloquial usage (e.g., "ccdname") to proper/actual usage 110 (e.g., "ccd"), including making suitable for path expansion (e.g. removing 111 commas). The default implementation does nothing. Note that this 112 method should not modify its input parameter. 114 getShortCcdName(self, ccdName): a static method that returns a shortened 115 name suitable for use as a filename. The default version converts spaces 118 _mapActualToPath(self, template, actualId): convert a template path to an 119 actual path, using the actual dataset identifier. 121 The mapper's behaviors are largely specified by the policy file. 122 See the MapperDictionary.paf for descriptions of the available items. 124 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 125 mappings (see Mappings class). 127 Common default mappings for all subclasses can be specified in the 128 "policy/{images,exposures,calibrations,datasets}.yaml" files. This 129 provides a simple way to add a product to all camera mappers. 
131 Functions to map (provide a path to the data given a dataset 132 identifier dictionary) and standardize (convert data into some standard 133 format or type) may be provided in the subclass as "map_{dataset type}" 134 and "std_{dataset type}", respectively. 136 If non-Exposure datasets cannot be retrieved using standard 137 daf_persistence methods alone, a "bypass_{dataset type}" function may be 138 provided in the subclass to return the dataset instead of using the 139 "datasets" subpolicy. 141 Implementations of map_camera and bypass_camera that should typically be 142 sufficient are provided in this base class. 148 Instead of auto-loading the camera at construction time, load it from 149 the calibration registry 153 policy : daf_persistence.Policy, 154 Policy with per-camera defaults already merged. 155 repositoryDir : string 156 Policy repository for the subclassing module (obtained with 157 getRepositoryPath() on the per-camera default dictionary). 158 root : string, optional 159 Path to the root directory for data. 160 registry : string, optional 161 Path to registry with data's metadata. 162 calibRoot : string, optional 163 Root directory for calibrations. 164 calibRegistry : string, optional 165 Path to registry with calibrations' metadata. 166 provided : list of string, optional 167 Keys provided by the mapper. 168 parentRegistry : Registry subclass, optional 169 Registry from a parent repository that may be used to look up 171 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 172 The configuration information for the repository this mapper is 179 MakeRawVisitInfoClass = MakeRawVisitInfo
182 PupilFactoryClass = afwCameraGeom.PupilFactory
185 translatorClass =
None 187 def __init__(self, policy, repositoryDir,
188 root=None, registry=None, calibRoot=None, calibRegistry=None,
189 provided=None, parentRegistry=None, repositoryCfg=None):
191 dafPersist.Mapper.__init__(self)
193 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
198 self.
root = repositoryCfg.root
202 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 203 if repoPolicy
is not None:
204 policy.update(repoPolicy)
208 if 'levels' in policy:
209 levelsPolicy = policy[
'levels']
210 for key
in levelsPolicy.names(
True):
211 self.
levels[key] = set(levelsPolicy.asArray(key))
214 if 'defaultSubLevels' in policy:
220 root = dafPersist.LogicalLocation(root).locString()
230 if calibRoot
is not None:
231 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
232 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
235 calibRoot = policy.get(
'calibRoot',
None)
237 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
239 if calibStorage
is None:
247 posixIfNoSql=(
not parentRegistry))
250 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
251 if needCalibRegistry:
254 "calibRegistryPath", calibStorage,
258 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " 259 f
"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")
279 raise ValueError(
'class variable packageName must not be None')
289 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
290 """Initialize mappings 292 For each of the dataset types that we want to be able to read, there 293 are methods that can be created to support them: 294 * map_<dataset> : determine the path for dataset 295 * std_<dataset> : standardize the retrieved dataset 296 * bypass_<dataset> : retrieve the dataset (bypassing the usual 298 * query_<dataset> : query the registry 300 Besides the dataset types explicitly listed in the policy, we create 301 additional, derived datasets for additional conveniences, 302 e.g., reading the header of an image, retrieving only the size of a 307 policy : `lsst.daf.persistence.Policy` 308 Policy with per-camera defaults already merged 309 rootStorage : `Storage subclass instance` 310 Interface to persisted repository data. 311 calibRoot : `Storage subclass instance` 312 Interface to persisted calib repository data 313 provided : `list` of `str` 314 Keys provided by the mapper 317 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
318 "obs_base",
"ImageMappingDefaults.yaml",
"policy"))
319 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
320 "obs_base",
"ExposureMappingDefaults.yaml",
"policy"))
321 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
322 "obs_base",
"CalibrationMappingDefaults.yaml",
"policy"))
323 dsMappingPolicy = dafPersist.Policy()
327 (
"images", imgMappingPolicy, ImageMapping),
328 (
"exposures", expMappingPolicy, ExposureMapping),
329 (
"calibrations", calMappingPolicy, CalibrationMapping),
330 (
"datasets", dsMappingPolicy, DatasetMapping)
333 for name, defPolicy, cls
in mappingList:
335 datasets = policy[name]
338 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
339 if os.path.exists(defaultsPath):
340 datasets.merge(dafPersist.Policy(defaultsPath))
343 setattr(self, name, mappings)
344 for datasetType
in datasets.names(
True):
345 subPolicy = datasets[datasetType]
346 subPolicy.merge(defPolicy)
348 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
349 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
350 subPolicy=subPolicy):
351 components = subPolicy.get(
'composite')
352 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 353 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 354 python = subPolicy[
'python']
355 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
356 disassembler=disassembler,
360 for name, component
in components.items():
361 butlerComposite.add(id=name,
362 datasetType=component.get(
'datasetType'),
363 setter=component.get(
'setter',
None),
364 getter=component.get(
'getter',
None),
365 subset=component.get(
'subset',
False),
366 inputOnly=component.get(
'inputOnly',
False))
367 return butlerComposite
368 setattr(self,
"map_" + datasetType, compositeClosure)
372 if name ==
"calibrations":
374 provided=provided, dataRoot=rootStorage)
376 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
379 raise ValueError(f
"Duplicate mapping policy for dataset type {datasetType}")
380 self.
keyDict.update(mapping.keys())
381 mappings[datasetType] = mapping
382 self.
mappings[datasetType] = mapping
383 if not hasattr(self,
"map_" + datasetType):
384 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
385 return mapping.map(mapper, dataId, write)
386 setattr(self,
"map_" + datasetType, mapClosure)
387 if not hasattr(self,
"query_" + datasetType):
388 def queryClosure(format, dataId, mapping=mapping):
389 return mapping.lookup(format, dataId)
390 setattr(self,
"query_" + datasetType, queryClosure)
391 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
392 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
393 return mapping.standardize(mapper, item, dataId)
394 setattr(self,
"std_" + datasetType, stdClosure)
396 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
397 """Set convenience methods on CameraMapper""" 398 mapName =
"map_" + datasetType +
"_" + suffix
399 bypassName =
"bypass_" + datasetType +
"_" + suffix
400 queryName =
"query_" + datasetType +
"_" + suffix
401 if not hasattr(self, mapName):
402 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
403 if not hasattr(self, bypassName):
404 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
405 bypassImpl = getattr(self,
"bypass_" + datasetType)
406 if bypassImpl
is not None:
407 setattr(self, bypassName, bypassImpl)
408 if not hasattr(self, queryName):
409 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
412 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
413 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
415 if subPolicy[
"storage"] ==
"FitsStorage":
416 def getMetadata(datasetType, pythonType, location, dataId):
417 md = readMetadata(location.getLocationsWithRoot()[0])
421 setMethods(
"md", bypassImpl=getMetadata)
424 addName =
"add_" + datasetType
425 if not hasattr(self, addName):
428 if name ==
"exposures":
429 def getSkyWcs(datasetType, pythonType, location, dataId):
430 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
431 return fitsReader.readWcs()
433 setMethods(
"wcs", bypassImpl=getSkyWcs)
435 def getRawHeaderWcs(datasetType, pythonType, location, dataId):
436 """Create a SkyWcs from the un-modified raw FITS WCS header keys.""" 437 if datasetType[:3] !=
"raw":
438 raise dafPersist.NoResults(
"Can only get header WCS for raw exposures.",
440 return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))
442 setMethods(
"header_wcs", bypassImpl=getRawHeaderWcs)
444 def getPhotoCalib(datasetType, pythonType, location, dataId):
445 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
446 return fitsReader.readPhotoCalib()
448 setMethods(
"photoCalib", bypassImpl=getPhotoCalib)
450 def getVisitInfo(datasetType, pythonType, location, dataId):
451 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
452 return fitsReader.readVisitInfo()
454 setMethods(
"visitInfo", bypassImpl=getVisitInfo)
456 def getFilter(datasetType, pythonType, location, dataId):
457 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
458 return fitsReader.readFilter()
460 setMethods(
"filter", bypassImpl=getFilter)
462 setMethods(
"detector",
463 mapImpl=
lambda dataId, write=
False:
464 dafPersist.ButlerLocation(
465 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
467 storageName=
"Internal",
468 locationList=
"ignored",
473 bypassImpl=
lambda datasetType, pythonType, location, dataId:
477 def getBBox(datasetType, pythonType, location, dataId):
478 md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
480 return afwImage.bboxFromMetadata(md)
482 setMethods(
"bbox", bypassImpl=getBBox)
484 elif name ==
"images":
485 def getBBox(datasetType, pythonType, location, dataId):
486 md = readMetadata(location.getLocationsWithRoot()[0])
488 return afwImage.bboxFromMetadata(md)
489 setMethods(
"bbox", bypassImpl=getBBox)
491 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
493 def getMetadata(datasetType, pythonType, location, dataId):
494 md = readMetadata(os.path.join(location.getStorage().root,
495 location.getLocations()[0]), hdu=1)
499 setMethods(
"md", bypassImpl=getMetadata)
502 if subPolicy[
"storage"] ==
"FitsStorage":
503 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
504 subId = dataId.copy()
506 loc = mapping.map(mapper, subId, write)
507 bbox = dataId[
'bbox']
508 llcX = bbox.getMinX()
509 llcY = bbox.getMinY()
510 width = bbox.getWidth()
511 height = bbox.getHeight()
512 loc.additionalData.set(
'llcX', llcX)
513 loc.additionalData.set(
'llcY', llcY)
514 loc.additionalData.set(
'width', width)
515 loc.additionalData.set(
'height', height)
516 if 'imageOrigin' in dataId:
517 loc.additionalData.set(
'imageOrigin',
518 dataId[
'imageOrigin'])
521 def querySubClosure(key, format, dataId, mapping=mapping):
522 subId = dataId.copy()
524 return mapping.lookup(format, subId)
525 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
527 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
530 def getLen(datasetType, pythonType, location, dataId):
531 md = readMetadata(os.path.join(location.getStorage().root,
532 location.getLocations()[0]), hdu=1)
536 setMethods(
"len", bypassImpl=getLen)
539 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
540 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
541 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
542 location.getLocations()[0])))
544 def _computeCcdExposureId(self, dataId):
545 """Compute the 64-bit (long) identifier for a CCD exposure. 547 Subclasses must override 552 Data identifier with visit, ccd. 554 raise NotImplementedError()
556 def _computeCoaddExposureId(self, dataId, singleFilter):
557 """Compute the 64-bit (long) identifier for a coadd. 559 Subclasses must override 564 Data identifier with tract and patch. 565 singleFilter : `bool` 566 True means the desired ID is for a single-filter coadd, in which 567 case dataIdmust contain filter. 569 raise NotImplementedError()
571 def _search(self, path):
572 """Search for path in the associated repository's storage. 577 Path that describes an object in the repository associated with 579 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 580 indicator will be stripped when searching and so will match 581 filenames without the HDU indicator, e.g. 'foo.fits'. The path 582 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 587 The path for this object in the repository. Will return None if the 588 object can't be found. If the input argument path contained an HDU 589 indicator, the returned path will also contain the HDU indicator. 594 """Rename any existing object with the given type and dataId. 596 The CameraMapper implementation saves objects in a sequence of e.g.: 602 All of the backups will be placed in the output repo, however, and will 603 not be removed if they are found elsewhere in the _parent chain. This 604 means that the same file will be stored twice if the previous version 605 was found in an input repo. 614 def firstElement(list):
615 """Get the first element in the list, or None if that can't be 618 return list[0]
if list
is not None and len(list)
else None 621 newLocation = self.map(datasetType, dataId, write=
True)
622 newPath = newLocation.getLocations()[0]
623 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
624 path = firstElement(path)
626 while path
is not None:
628 oldPaths.append((n, path))
629 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
630 path = firstElement(path)
631 for n, oldPath
in reversed(oldPaths):
632 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
635 """Return supported keys. 640 List of keys usable in a dataset identifier 645 """Return a dict of supported keys and their value types for a given 646 dataset type at a given level of the key hierarchy. 651 Dataset type or None for all dataset types. 652 level : `str` or None 653 Level or None for all levels or '' for the default level for the 659 Keys are strings usable in a dataset identifier, values are their 667 if datasetType
is None:
668 keyDict = copy.copy(self.
keyDict)
671 if level
is not None and level
in self.
levels:
672 keyDict = copy.copy(keyDict)
673 for l
in self.
levels[level]:
688 """Return the name of the camera that this CameraMapper is for.""" 690 className = className[className.find(
'.'):-1]
691 m = re.search(
r'(\w+)Mapper', className)
693 m = re.search(
r"class '[\w.]*?(\w+)'", className)
695 return name[:1].lower() + name[1:]
if name
else '' 699 """Return the name of the package containing this CameraMapper.""" 701 raise ValueError(
'class variable packageName must not be None')
706 """Return the base directory of this package""" 710 """Map a camera dataset.""" 712 raise RuntimeError(
"No camera dataset available.")
714 return dafPersist.ButlerLocation(
715 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
717 storageName=
"ConfigStorage",
725 """Return the (preloaded) camera object. 728 raise RuntimeError(
"No camera dataset available.")
732 return dafPersist.ButlerLocation(
733 pythonType=
"lsst.obs.base.ExposureIdInfo",
735 storageName=
"Internal",
736 locationList=
"ignored",
743 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 744 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
745 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
749 """Disable standardization for bfKernel 751 bfKernel is a calibration product that is numpy array, 752 unlike other calibration products that are all images; 753 all calibration images are sent through _standardizeExposure 754 due to CalibrationMapping, but we don't want that to happen to bfKernel 759 """Standardize a raw dataset by converting it to an Exposure instead 762 trimmed=
False, setVisitInfo=
True)
765 """Map a sky policy.""" 766 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
767 "Internal",
None,
None, self,
771 """Standardize a sky policy by returning the one we use.""" 772 return self.skypolicy
780 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
782 """Set up a registry (usually SQLite3), trying a number of possible 790 Description of registry (for log messages) 794 Policy that contains the registry name, used if path is None. 796 Key in policy for registry path. 797 storage : Storage subclass 798 Repository Storage to look in. 799 searchParents : bool, optional 800 True if the search for a registry should follow any Butler v1 802 posixIfNoSql : bool, optional 803 If an sqlite registry is not found, will create a posix registry if 808 lsst.daf.persistence.Registry 811 if path
is None and policyKey
in policy:
812 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
813 if os.path.isabs(path):
814 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
815 if not storage.exists(path):
816 newPath = storage.instanceSearch(path)
818 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 820 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
824 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
832 if path
and (path.startswith(root)):
833 path = path[len(root +
'/'):]
834 except AttributeError:
840 def search(filename, description):
841 """Search for file in storage 846 Filename to search for 848 Description of file, for error message. 852 path : `str` or `None` 853 Path to file, or None 855 result = storage.instanceSearch(filename)
858 self.
log.debug(
"Unable to locate %s: %s", description, filename)
863 path = search(
"%s.pgsql" % name,
"%s in root" % description)
865 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
867 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
870 if not storage.exists(path):
871 newPath = storage.instanceSearch(path)
872 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 873 if newPath
is not None:
875 localFileObj = storage.getLocalFile(path)
876 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
877 registry = dafPersist.Registry.create(localFileObj.name)
879 elif not registry
and posixIfNoSql:
881 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
882 registry = dafPersist.PosixRegistry(storage.root)
888 def _transformId(self, dataId):
889 """Generate a standard ID dict from a camera-specific ID dict. 891 Canonical keys include: 892 - amp: amplifier name 893 - ccd: CCD name (in LSST this is a combination of raft and sensor) 894 The default implementation returns a copy of its input. 899 Dataset identifier; this must not be modified 904 Transformed dataset identifier. 909 def _mapActualToPath(self, template, actualId):
910 """Convert a template path to an actual path, using the actual data 911 identifier. This implementation is usually sufficient but can be 912 overridden by the subclass. 929 return template % transformedId
930 except Exception
as e:
931 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
935 """Convert a CCD name to a form useful as a filename 937 The default implementation converts spaces to underscores. 939 return ccdName.replace(
" ",
"_")
941 def _extractDetectorName(self, dataId):
942 """Extract the detector (CCD) name from the dataset identifier. 944 The name in question is the detector name used by lsst.afw.cameraGeom. 956 raise NotImplementedError(
"No _extractDetectorName() function specified")
958 @deprecated(
"This method is no longer used for ISR (will be removed after v11)", category=FutureWarning)
959 def _extractAmpId(self, dataId):
960 """Extract the amplifier identifer from a dataset identifier. 962 .. note:: Deprecated in 11_0 964 amplifier identifier has two parts: the detector name for the CCD 965 containing the amplifier and index of the amplifier in the detector. 979 return (trDataId[
"ccd"], int(trDataId[
'amp']))
981 def _setAmpDetector(self, item, dataId, trimmed=True):
982 """Set the detector object in an Exposure for an amplifier. 984 Defects are also added to the Exposure based on the detector object. 988 item : `lsst.afw.image.Exposure` 989 Exposure to set the detector in. 993 Should detector be marked as trimmed? (ignored) 998 def _setCcdDetector(self, item, dataId, trimmed=True):
999 """Set the detector object in an Exposure for a CCD. 1003 item : `lsst.afw.image.Exposure` 1004 Exposure to set the detector in. 1008 Should detector be marked as trimmed? (ignored) 1010 if item.getDetector()
is not None:
1014 detector = self.
camera[detectorName]
1015 item.setDetector(detector)
1017 def _setFilter(self, mapping, item, dataId):
1018 """Set the filter object in an Exposure. If the Exposure had a FILTER 1019 keyword, this was already processed during load. But if it didn't, 1020 use the filter from the registry. 1024 mapping : `lsst.obs.base.Mapping` 1025 Where to get the filter from. 1026 item : `lsst.afw.image.Exposure` 1027 Exposure to set the filter in. 1032 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
1033 or isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1036 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1039 actualId = mapping.need([
'filter'], dataId)
1040 filterName = actualId[
'filter']
1042 filterName = self.
filters[filterName]
1044 item.setFilter(afwImage.Filter(filterName))
1045 except pexExcept.NotFoundError:
1046 self.
log.warn(
"Filter %s not defined. Set to UNKNOWN." % (filterName))
1048 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1049 trimmed=True, setVisitInfo=True):
1050 """Default standardization function for images. 1052 This sets the Detector from the camera geometry 1053 and optionally set the Filter. In both cases this saves 1054 having to persist some data in each exposure (or image). 1058 mapping : `lsst.obs.base.Mapping` 1059 Where to get the values from. 1060 item : image-like object 1061 Can be any of lsst.afw.image.Exposure, 1062 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1063 or lsst.afw.image.MaskedImage 1068 Set filter? Ignored if item is already an exposure 1070 Should detector be marked as trimmed? 1071 setVisitInfo : `bool` 1072 Should Exposure have its VisitInfo filled out from the metadata? 1076 `lsst.afw.image.Exposure` 1077 The standardized Exposure. 1081 setVisitInfo=setVisitInfo)
1082 except Exception
as e:
1083 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1086 if mapping.level.lower() ==
"amp":
1088 elif mapping.level.lower() ==
"ccd":
1094 if mapping.level.lower() !=
"amp" and exposure.getWcs()
is None and \
1095 (exposure.getInfo().getVisitInfo()
is not None or exposure.getMetadata().toDict()):
1103 def _createSkyWcsFromMetadata(self, exposure):
1104 """Create a SkyWcs from the FITS header metadata in an Exposure. 1108 exposure : `lsst.afw.image.Exposure` 1109 The exposure to get metadata from, and attach the SkyWcs to. 1111 metadata = exposure.getMetadata()
1113 wcs = afwGeom.makeSkyWcs(metadata, strip=
True)
1114 exposure.setWcs(wcs)
1115 except pexExcept.TypeError
as e:
1117 self.
log.debug(
"wcs set to None; missing information found in metadata to create a valid wcs:" 1120 exposure.setMetadata(metadata)
1122 def _createInitialSkyWcs(self, exposure):
1123 """Create a SkyWcs from the boresight and camera geometry. 1125 If the boresight or camera geometry do not support this method of 1126 WCS creation, this falls back on the header metadata-based version 1127 (typically a purely linear FITS crval/crpix/cdmatrix WCS). 1131 exposure : `lsst.afw.image.Exposure` 1132 The exposure to get data from, and attach the SkyWcs to. 1137 if exposure.getInfo().getVisitInfo()
is None:
1138 msg =
"No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs." 1142 newSkyWcs =
createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
1143 exposure.setWcs(newSkyWcs)
1144 except InitialSkyWcsError
as e:
1145 msg =
"Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s" 1146 self.
log.warn(msg, e)
1147 self.
log.debug(
"Exception was: %s", traceback.TracebackException.from_exception(e))
1148 if e.__context__
is not None:
1149 self.
log.debug(
"Root-cause Exception was: %s",
1150 traceback.TracebackException.from_exception(e.__context__))
1152 def _makeCamera(self, policy, repositoryDir):
1153 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 1156 Also set self.cameraDataLocation, if relevant (else it can be left 1159 This implementation assumes that policy contains an entry "camera" 1160 that points to the subdirectory in this package of camera data; 1161 specifically, that subdirectory must contain: 1162 - a file named `camera.py` that contains persisted camera config 1163 - ampInfo table FITS files, as required by 1164 lsst.afw.cameraGeom.makeCameraFromPath 1168 policy : `lsst.daf.persistence.Policy` 1169 Policy with per-camera defaults already merged 1170 (PexPolicy only for backward compatibility). 1171 repositoryDir : `str` 1172 Policy repository for the subclassing module (obtained with 1173 getRepositoryPath() on the per-camera default dictionary). 1175 if 'camera' not in policy:
1176 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1177 cameraDataSubdir = policy[
'camera']
1179 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1180 cameraConfig = afwCameraGeom.CameraConfig()
1183 return afwCameraGeom.makeCameraFromPath(
1184 cameraConfig=cameraConfig,
1185 ampInfoPath=ampInfoPath,
1191 """Get the registry used by this mapper. 1196 The registry used by this mapper for this mapper's repository. 1201 """Stuff image compression settings into a daf.base.PropertySet 1203 This goes into the ButlerLocation's "additionalData", which gets 1204 passed into the boost::persistence framework. 1209 Type of dataset for which to get the image compression settings. 1215 additionalData : `lsst.daf.base.PropertySet` 1216 Image compression settings. 1218 mapping = self.
mappings[datasetType]
1219 recipeName = mapping.recipe
1220 storageType = mapping.storage
1222 return dafBase.PropertySet()
1224 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1225 (datasetType, storageType, recipeName))
1226 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1227 seed = hash(tuple(dataId.items())) % 2**31
1228 for plane
in (
"image",
"mask",
"variance"):
1229 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1230 recipe.set(plane +
".scaling.seed", seed)
def _initWriteRecipes(self):
    """Read the recipes for writing files.

    These recipes are currently used for configuring FITS compression,
    but they could have wider uses for configuring different flavors
    of the storage types. A recipe is referred to by a symbolic name,
    which has associated settings. These settings are stored as a
    `PropertySet` so they can easily be passed down to the
    boost::persistence framework as the "additionalData" parameter.

    The list of recipes is written in YAML. A default recipe and
    some other convenient recipes are in obs_base/policy/writeRecipes.yaml
    and these may be overridden or supplemented by the individual obs_*
    packages' own policy/writeRecipes.yaml files.

    Recipes are grouped by the storage type. Currently, only the
    ``FitsStorage`` storage type uses recipes, which uses it to
    configure FITS image compression.

    Each ``FitsStorage`` recipe for FITS compression should define
    "image", "mask" and "variance" entries, each of which may contain
    "compression" and "scaling" entries. Defaults will be provided for
    any missing elements under "compression" and "scaling".

    The allowed entries under "compression" are:

    * algorithm (string): compression algorithm to use
    * rows (int): number of rows per tile (0 = entire dimension)
    * columns (int): number of columns per tile (0 = entire dimension)
    * quantizeLevel (float): cfitsio quantization level

    The allowed entries under "scaling" are:

    * algorithm (string): scaling algorithm to use
    * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
    * fuzz (bool): fuzz the values when quantising floating-point values?
    * seed (long): seed for random number generator when fuzzing
    * maskPlanes (list of string): mask planes to ignore when doing
      statistics
    * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
    * quantizePad: number of stdev to allow on the low side (for
      STDEV_POSITIVE/NEGATIVE)
    * bscale: manually specified BSCALE (for MANUAL scaling)
    * bzero: manually specified BZERO (for MANUAL scaling)

    A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
    """
    # Base recipes shipped with obs_base; per-camera packages may add more.
    recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
    recipes = dafPersist.Policy(recipesFile)
    supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
    validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
    if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
        supplements = dafPersist.Policy(supplementsFile)
        # Supplements may only add new recipe names, never override the
        # base ones; raise only when an actual name collision exists
        # (the garbled original raised unconditionally here).
        for entry in validationMenu:
            intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
            if intersection:
                raise RuntimeError(
                    "Recipes provided in %s section %s may not override those in %s: %s" %
                    (supplementsFile, entry, recipesFile, intersection))
        recipes.update(supplements)

    # Initialize before assigning per-storage-type entries below
    # (missing in the garbled original).
    self._writeRecipes = {}
    for storageType in recipes.names(True):
        if "default" not in recipes[storageType]:
            raise RuntimeError(
                "No 'default' recipe defined for storage type %s in %s" %
                (storageType, recipesFile))
        # Validate and fill in defaults via the storage-type-specific validator.
        self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object.

    If the image is a DecoratedImage then also set its metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set).

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier; used only to compute the exposure ID when
        building a VisitInfo via the mapper.
    mapper : `CameraMapper`, optional
        Mapper supplying ``_computeCcdExposureId`` and
        ``makeRawVisitInfo``; without it no VisitInfo can be attached.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default "CameraMapper" logger is created
        if needed.
    setVisitInfo : `bool`, optional
        If `True`, attach a VisitInfo derived from the image metadata when
        the exposure does not already have one.

    Returns
    -------
    exposure : `lsst.afw.image.Exposure`
        Exposure containing the input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Fix: bind the input before reading its metadata; the garbled
        # original used ``exposure`` in this branch without assigning it.
        exposure = image
        metadata = exposure.getMetadata()
    else:  # assume a plain Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Attach a VisitInfo when requested, possible (metadata available),
    # and not already present.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)
    return exposure
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage.

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` of `lsst.daf.base.PropertySet`
        Validated FitsStorage recipes, keyed by recipe name.

    Raises
    ------
    RuntimeError
        If validation fails.
    """
    # Schemas define the allowed keys and their default values; the type
    # of each default is also used to coerce user-supplied values.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Raise if the entry contains keywords not in ``allowed``."""
        unrecognized = set(entry.keys()) - set(allowed)
        # Raise only when something unexpected is present (the garbled
        # original raised unconditionally).
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent entirely: fill in all defaults.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(),
                                  name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce provided values to the schema default's type;
                    # fall back to the default when the key is absent.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _createInitialSkyWcs(self, exposure)
def _setCcdDetector(self, item, dataId, trimmed=True)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def _createSkyWcsFromMetadata(self, exposure)
def createInitialSkyWcs(visitInfo, detector, flipX=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
Utility functions.
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)