29 from astro_metadata_translator
import fix_header
30 import lsst.daf.persistence
as dafPersist
31 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
32 import lsst.daf.base
as dafBase
33 import lsst.afw.geom
as afwGeom
34 import lsst.afw.image
as afwImage
35 import lsst.afw.table
as afwTable
36 from lsst.afw.fits
import readMetadata
37 import lsst.afw.cameraGeom
as afwCameraGeom
38 import lsst.log
as lsstLog
40 from .exposureIdInfo
import ExposureIdInfo
41 from .makeRawVisitInfo
import MakeRawVisitInfo
42 from .utils
import createInitialSkyWcs, InitialSkyWcsError
45 __all__ = [
"CameraMapper",
"exposureFromImage"]
50 """CameraMapper is a base class for mappers that handle images from a 51 camera and products derived from them. This provides an abstraction layer 52 between the data on disk and the code. 54 Public methods: keys, queryMetadata, getDatasetTypes, map, 55 canStandardize, standardize 57 Mappers for specific data sources (e.g., CFHT Megacam, LSST 58 simulations, etc.) should inherit this class. 60 The CameraMapper manages datasets within a "root" directory. Note that 61 writing to a dataset present in the input root will hide the existing 62 dataset but not overwrite it. See #2160 for design discussion. 64 A camera is assumed to consist of one or more rafts, each composed of 65 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 66 (amps). A camera is also assumed to have a camera geometry description 67 (CameraGeom object) as a policy file, a filter description (Filter class 68 static configuration) as another policy file. 70 Information from the camera geometry and defects are inserted into all 71 Exposure objects returned. 73 The mapper uses one or two registries to retrieve metadata about the 74 images. The first is a registry of all raw exposures. This must contain 75 the time of the observation. One or more tables (or the equivalent) 76 within the registry are used to look up data identifier components that 77 are not specified by the user (e.g. filter) and to return results for 78 metadata queries. The second is an optional registry of all calibration 79 data. This should contain validity start and end entries for each 80 calibration dataset in the same timescale as the observation time. 82 Subclasses will typically set MakeRawVisitInfoClass and optionally the 83 metadata translator class: 85 MakeRawVisitInfoClass: a class variable that points to a subclass of 86 MakeRawVisitInfo, a functor that creates an 87 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
89 translatorClass: The `~astro_metadata_translator.MetadataTranslator` 90 class to use for fixing metadata values. If it is not set an attempt 91 will be made to infer the class from ``MakeRawVisitInfoClass``, failing 92 that the metadata fixup will try to infer the translator class from the 95 Subclasses must provide the following methods: 97 _extractDetectorName(self, dataId): returns the detector name for a CCD 98 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 99 a dataset identifier referring to that CCD or a subcomponent of it. 101 _computeCcdExposureId(self, dataId): see below 103 _computeCoaddExposureId(self, dataId, singleFilter): see below 105 Subclasses may also need to override the following methods: 107 _transformId(self, dataId): transformation of a data identifier 108 from colloquial usage (e.g., "ccdname") to proper/actual usage 109 (e.g., "ccd"), including making suitable for path expansion (e.g. removing 110 commas). The default implementation does nothing. Note that this 111 method should not modify its input parameter. 113 getShortCcdName(self, ccdName): a static method that returns a shortened 114 name suitable for use as a filename. The default version converts spaces 117 _mapActualToPath(self, template, actualId): convert a template path to an 118 actual path, using the actual dataset identifier. 120 The mapper's behaviors are largely specified by the policy file. 121 See the MapperDictionary.paf for descriptions of the available items. 123 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 124 mappings (see Mappings class). 126 Common default mappings for all subclasses can be specified in the 127 "policy/{images,exposures,calibrations,datasets}.yaml" files. This 128 provides a simple way to add a product to all camera mappers. 
130 Functions to map (provide a path to the data given a dataset 131 identifier dictionary) and standardize (convert data into some standard 132 format or type) may be provided in the subclass as "map_{dataset type}" 133 and "std_{dataset type}", respectively. 135 If non-Exposure datasets cannot be retrieved using standard 136 daf_persistence methods alone, a "bypass_{dataset type}" function may be 137 provided in the subclass to return the dataset instead of using the 138 "datasets" subpolicy. 140 Implementations of map_camera and bypass_camera that should typically be 141 sufficient are provided in this base class. 147 Instead of auto-loading the camera at construction time, load it from 148 the calibration registry 152 policy : daf_persistence.Policy, 153 Policy with per-camera defaults already merged. 154 repositoryDir : string 155 Policy repository for the subclassing module (obtained with 156 getRepositoryPath() on the per-camera default dictionary). 157 root : string, optional 158 Path to the root directory for data. 159 registry : string, optional 160 Path to registry with data's metadata. 161 calibRoot : string, optional 162 Root directory for calibrations. 163 calibRegistry : string, optional 164 Path to registry with calibrations' metadata. 165 provided : list of string, optional 166 Keys provided by the mapper. 167 parentRegistry : Registry subclass, optional 168 Registry from a parent repository that may be used to look up 170 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 171 The configuration information for the repository this mapper is 178 MakeRawVisitInfoClass = MakeRawVisitInfo
181 PupilFactoryClass = afwCameraGeom.PupilFactory
184 translatorClass =
None 186 def __init__(self, policy, repositoryDir,
187 root=None, registry=None, calibRoot=None, calibRegistry=None,
188 provided=None, parentRegistry=None, repositoryCfg=None):
190 dafPersist.Mapper.__init__(self)
192 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
197 self.
root = repositoryCfg.root
201 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 202 if repoPolicy
is not None:
203 policy.update(repoPolicy)
207 if 'levels' in policy:
208 levelsPolicy = policy[
'levels']
209 for key
in levelsPolicy.names(
True):
210 self.
levels[key] = set(levelsPolicy.asArray(key))
213 if 'defaultSubLevels' in policy:
219 root = dafPersist.LogicalLocation(root).locString()
229 if calibRoot
is not None:
230 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
231 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
234 calibRoot = policy.get(
'calibRoot',
None)
236 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
238 if calibStorage
is None:
246 posixIfNoSql=(
not parentRegistry))
249 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
250 if needCalibRegistry:
253 "calibRegistryPath", calibStorage,
257 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
258 "calibRoot ivar:%s or policy['calibRoot']:%s" %
259 (calibRoot, policy.get(
'calibRoot',
None)))
279 raise ValueError(
'class variable packageName must not be None')
289 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
290 """Initialize mappings 292 For each of the dataset types that we want to be able to read, there 293 are methods that can be created to support them: 294 * map_<dataset> : determine the path for dataset 295 * std_<dataset> : standardize the retrieved dataset 296 * bypass_<dataset> : retrieve the dataset (bypassing the usual 298 * query_<dataset> : query the registry 300 Besides the dataset types explicitly listed in the policy, we create 301 additional, derived datasets for additional conveniences, 302 e.g., reading the header of an image, retrieving only the size of a 307 policy : `lsst.daf.persistence.Policy` 308 Policy with per-camera defaults already merged 309 rootStorage : `Storage subclass instance` 310 Interface to persisted repository data. 311 calibRoot : `Storage subclass instance` 312 Interface to persisted calib repository data 313 provided : `list` of `str` 314 Keys provided by the mapper 317 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
318 "obs_base",
"ImageMappingDefaults.yaml",
"policy"))
319 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
320 "obs_base",
"ExposureMappingDefaults.yaml",
"policy"))
321 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
322 "obs_base",
"CalibrationMappingDefaults.yaml",
"policy"))
323 dsMappingPolicy = dafPersist.Policy()
327 (
"images", imgMappingPolicy, ImageMapping),
328 (
"exposures", expMappingPolicy, ExposureMapping),
329 (
"calibrations", calMappingPolicy, CalibrationMapping),
330 (
"datasets", dsMappingPolicy, DatasetMapping)
333 for name, defPolicy, cls
in mappingList:
335 datasets = policy[name]
338 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
339 if os.path.exists(defaultsPath):
340 datasets.merge(dafPersist.Policy(defaultsPath))
343 setattr(self, name, mappings)
344 for datasetType
in datasets.names(
True):
345 subPolicy = datasets[datasetType]
346 subPolicy.merge(defPolicy)
348 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
349 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
350 subPolicy=subPolicy):
351 components = subPolicy.get(
'composite')
352 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 353 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 354 python = subPolicy[
'python']
355 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
356 disassembler=disassembler,
360 for name, component
in components.items():
361 butlerComposite.add(id=name,
362 datasetType=component.get(
'datasetType'),
363 setter=component.get(
'setter',
None),
364 getter=component.get(
'getter',
None),
365 subset=component.get(
'subset',
False),
366 inputOnly=component.get(
'inputOnly',
False))
367 return butlerComposite
368 setattr(self,
"map_" + datasetType, compositeClosure)
372 if name ==
"calibrations":
374 provided=provided, dataRoot=rootStorage)
376 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
379 raise ValueError(f
"Duplicate mapping policy for dataset type {datasetType}")
380 self.
keyDict.update(mapping.keys())
381 mappings[datasetType] = mapping
382 self.
mappings[datasetType] = mapping
383 if not hasattr(self,
"map_" + datasetType):
def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
    # ``mapping`` and ``mapper`` are bound as default arguments so each
    # generated map_<datasetType> method keeps its own mapping object
    # (avoids the late-binding closure pitfall); the weak proxy avoids a
    # reference cycle with self.
    location = mapping.map(mapper, dataId, write)
    return location
386 setattr(self,
"map_" + datasetType, mapClosure)
387 if not hasattr(self,
"query_" + datasetType):
def queryClosure(format, dataId, mapping=mapping):
    # Registry lookup for this dataset type; ``mapping`` is captured at
    # definition time via the default argument.
    result = mapping.lookup(format, dataId)
    return result
390 setattr(self,
"query_" + datasetType, queryClosure)
391 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
    # Standardize a retrieved dataset through the per-dataset-type mapping;
    # ``mapper`` is a weak proxy to self to avoid a reference cycle.
    standardized = mapping.standardize(mapper, item, dataId)
    return standardized
394 setattr(self,
"std_" + datasetType, stdClosure)
396 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
397 """Set convenience methods on CameraMapper""" 398 mapName =
"map_" + datasetType +
"_" + suffix
399 bypassName =
"bypass_" + datasetType +
"_" + suffix
400 queryName =
"query_" + datasetType +
"_" + suffix
401 if not hasattr(self, mapName):
402 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
403 if not hasattr(self, bypassName):
404 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
405 bypassImpl = getattr(self,
"bypass_" + datasetType)
406 if bypassImpl
is not None:
407 setattr(self, bypassName, bypassImpl)
408 if not hasattr(self, queryName):
409 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
412 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
413 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
415 if subPolicy[
"storage"] ==
"FitsStorage":
416 def getMetadata(datasetType, pythonType, location, dataId):
417 md = readMetadata(location.getLocationsWithRoot()[0])
421 setMethods(
"md", bypassImpl=getMetadata)
424 addName =
"add_" + datasetType
425 if not hasattr(self, addName):
428 if name ==
"exposures":
def getSkyWcs(datasetType, pythonType, location, dataId):
    """Bypass function: read only the SkyWcs component of an exposure.

    Opens the FITS file backing ``location`` with a component reader so
    the full image is never loaded.
    """
    fitsPath = location.getLocationsWithRoot()[0]
    reader = afwImage.ExposureFitsReader(fitsPath)
    return reader.readWcs()
433 setMethods(
"wcs", bypassImpl=getSkyWcs)
def getPhotoCalib(datasetType, pythonType, location, dataId):
    """Bypass function: read only the PhotoCalib component of an exposure.

    Uses a FITS component reader so the pixel data is not loaded.
    """
    fitsPath = location.getLocationsWithRoot()[0]
    reader = afwImage.ExposureFitsReader(fitsPath)
    return reader.readPhotoCalib()
439 setMethods(
"photoCalib", bypassImpl=getPhotoCalib)
def getVisitInfo(datasetType, pythonType, location, dataId):
    """Bypass function: read only the VisitInfo component of an exposure.

    Uses a FITS component reader so the pixel data is not loaded.
    """
    fitsPath = location.getLocationsWithRoot()[0]
    reader = afwImage.ExposureFitsReader(fitsPath)
    return reader.readVisitInfo()
445 setMethods(
"visitInfo", bypassImpl=getVisitInfo)
def getFilter(datasetType, pythonType, location, dataId):
    """Bypass function: read only the Filter component of an exposure.

    Uses a FITS component reader so the pixel data is not loaded.
    """
    fitsPath = location.getLocationsWithRoot()[0]
    reader = afwImage.ExposureFitsReader(fitsPath)
    return reader.readFilter()
451 setMethods(
"filter", bypassImpl=getFilter)
453 setMethods(
"detector",
454 mapImpl=
lambda dataId, write=
False:
455 dafPersist.ButlerLocation(
456 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
458 storageName=
"Internal",
459 locationList=
"ignored",
464 bypassImpl=
lambda datasetType, pythonType, location, dataId:
468 def getBBox(datasetType, pythonType, location, dataId):
469 md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
471 return afwImage.bboxFromMetadata(md)
473 setMethods(
"bbox", bypassImpl=getBBox)
475 elif name ==
"images":
476 def getBBox(datasetType, pythonType, location, dataId):
477 md = readMetadata(location.getLocationsWithRoot()[0])
479 return afwImage.bboxFromMetadata(md)
480 setMethods(
"bbox", bypassImpl=getBBox)
482 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
484 def getMetadata(datasetType, pythonType, location, dataId):
485 md = readMetadata(os.path.join(location.getStorage().root,
486 location.getLocations()[0]), hdu=1)
490 setMethods(
"md", bypassImpl=getMetadata)
493 if subPolicy[
"storage"] ==
"FitsStorage":
494 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
495 subId = dataId.copy()
497 loc = mapping.map(mapper, subId, write)
498 bbox = dataId[
'bbox']
499 llcX = bbox.getMinX()
500 llcY = bbox.getMinY()
501 width = bbox.getWidth()
502 height = bbox.getHeight()
503 loc.additionalData.set(
'llcX', llcX)
504 loc.additionalData.set(
'llcY', llcY)
505 loc.additionalData.set(
'width', width)
506 loc.additionalData.set(
'height', height)
507 if 'imageOrigin' in dataId:
508 loc.additionalData.set(
'imageOrigin',
509 dataId[
'imageOrigin'])
512 def querySubClosure(key, format, dataId, mapping=mapping):
513 subId = dataId.copy()
515 return mapping.lookup(format, subId)
516 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
518 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
521 def getLen(datasetType, pythonType, location, dataId):
522 md = readMetadata(os.path.join(location.getStorage().root,
523 location.getLocations()[0]), hdu=1)
527 setMethods(
"len", bypassImpl=getLen)
530 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
531 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
532 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
533 location.getLocations()[0])))
535 def _computeCcdExposureId(self, dataId):
536 """Compute the 64-bit (long) identifier for a CCD exposure. 538 Subclasses must override 543 Data identifier with visit, ccd. 545 raise NotImplementedError()
547 def _computeCoaddExposureId(self, dataId, singleFilter):
548 """Compute the 64-bit (long) identifier for a coadd. 550 Subclasses must override 555 Data identifier with tract and patch. 556 singleFilter : `bool` 557 True means the desired ID is for a single-filter coadd, in which 558 case dataIdmust contain filter. 560 raise NotImplementedError()
562 def _search(self, path):
563 """Search for path in the associated repository's storage. 568 Path that describes an object in the repository associated with 570 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 571 indicator will be stripped when searching and so will match 572 filenames without the HDU indicator, e.g. 'foo.fits'. The path 573 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 578 The path for this object in the repository. Will return None if the 579 object can't be found. If the input argument path contained an HDU 580 indicator, the returned path will also contain the HDU indicator. 585 """Rename any existing object with the given type and dataId. 587 The CameraMapper implementation saves objects in a sequence of e.g.: 593 All of the backups will be placed in the output repo, however, and will 594 not be removed if they are found elsewhere in the _parent chain. This 595 means that the same file will be stored twice if the previous version 596 was found in an input repo. 605 def firstElement(list):
606 """Get the first element in the list, or None if that can't be 609 return list[0]
if list
is not None and len(list)
else None 612 newLocation = self.map(datasetType, dataId, write=
True)
613 newPath = newLocation.getLocations()[0]
614 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
615 path = firstElement(path)
617 while path
is not None:
619 oldPaths.append((n, path))
620 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
621 path = firstElement(path)
622 for n, oldPath
in reversed(oldPaths):
623 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
626 """Return supported keys. 631 List of keys usable in a dataset identifier 636 """Return a dict of supported keys and their value types for a given 637 dataset type at a given level of the key hierarchy. 642 Dataset type or None for all dataset types. 643 level : `str` or None 644 Level or None for all levels or '' for the default level for the 650 Keys are strings usable in a dataset identifier, values are their 658 if datasetType
is None:
659 keyDict = copy.copy(self.
keyDict)
662 if level
is not None and level
in self.
levels:
663 keyDict = copy.copy(keyDict)
664 for l
in self.
levels[level]:
679 """Return the name of the camera that this CameraMapper is for.""" 681 className = className[className.find(
'.'):-1]
682 m = re.search(
r'(\w+)Mapper', className)
684 m = re.search(
r"class '[\w.]*?(\w+)'", className)
686 return name[:1].lower() + name[1:]
if name
else '' 690 """Return the name of the package containing this CameraMapper.""" 692 raise ValueError(
'class variable packageName must not be None')
697 """Return the base directory of this package""" 701 """Map a camera dataset.""" 703 raise RuntimeError(
"No camera dataset available.")
705 return dafPersist.ButlerLocation(
706 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
708 storageName=
"ConfigStorage",
716 """Return the (preloaded) camera object. 719 raise RuntimeError(
"No camera dataset available.")
723 return dafPersist.ButlerLocation(
724 pythonType=
"lsst.obs.base.ExposureIdInfo",
726 storageName=
"Internal",
727 locationList=
"ignored",
734 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 735 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
736 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
740 """Disable standardization for bfKernel 742 bfKernel is a calibration product that is numpy array, 743 unlike other calibration products that are all images; 744 all calibration images are sent through _standardizeExposure 745 due to CalibrationMapping, but we don't want that to happen to bfKernel 750 """Standardize a raw dataset by converting it to an Exposure instead 753 trimmed=
False, setVisitInfo=
True)
756 """Map a sky policy.""" 757 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
758 "Internal",
None,
None, self,
762 """Standardize a sky policy by returning the one we use.""" 763 return self.skypolicy
771 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
773 """Set up a registry (usually SQLite3), trying a number of possible 781 Description of registry (for log messages) 785 Policy that contains the registry name, used if path is None. 787 Key in policy for registry path. 788 storage : Storage subclass 789 Repository Storage to look in. 790 searchParents : bool, optional 791 True if the search for a registry should follow any Butler v1 793 posixIfNoSql : bool, optional 794 If an sqlite registry is not found, will create a posix registry if 799 lsst.daf.persistence.Registry 802 if path
is None and policyKey
in policy:
803 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
804 if os.path.isabs(path):
805 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
806 if not storage.exists(path):
807 newPath = storage.instanceSearch(path)
809 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 811 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
815 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
823 if path
and (path.startswith(root)):
824 path = path[len(root +
'/'):]
825 except AttributeError:
831 def search(filename, description):
832 """Search for file in storage 837 Filename to search for 839 Description of file, for error message. 843 path : `str` or `None` 844 Path to file, or None 846 result = storage.instanceSearch(filename)
849 self.
log.debug(
"Unable to locate %s: %s", description, filename)
854 path = search(
"%s.pgsql" % name,
"%s in root" % description)
856 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
858 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
861 if not storage.exists(path):
862 newPath = storage.instanceSearch(path)
863 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 864 if newPath
is not None:
866 localFileObj = storage.getLocalFile(path)
867 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
868 registry = dafPersist.Registry.create(localFileObj.name)
870 elif not registry
and posixIfNoSql:
872 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
873 registry = dafPersist.PosixRegistry(storage.root)
879 def _transformId(self, dataId):
880 """Generate a standard ID dict from a camera-specific ID dict. 882 Canonical keys include: 883 - amp: amplifier name 884 - ccd: CCD name (in LSST this is a combination of raft and sensor) 885 The default implementation returns a copy of its input. 890 Dataset identifier; this must not be modified 895 Transformed dataset identifier. 900 def _mapActualToPath(self, template, actualId):
901 """Convert a template path to an actual path, using the actual data 902 identifier. This implementation is usually sufficient but can be 903 overridden by the subclass. 920 return template % transformedId
921 except Exception
as e:
922 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
926 """Convert a CCD name to a form useful as a filename 928 The default implementation converts spaces to underscores. 930 return ccdName.replace(
" ",
"_")
932 def _extractDetectorName(self, dataId):
933 """Extract the detector (CCD) name from the dataset identifier. 935 The name in question is the detector name used by lsst.afw.cameraGeom. 947 raise NotImplementedError(
"No _extractDetectorName() function specified")
949 def _extractAmpId(self, dataId):
950 """Extract the amplifier identifer from a dataset identifier. 952 .. note:: Deprecated in 11_0 954 amplifier identifier has two parts: the detector name for the CCD 955 containing the amplifier and index of the amplifier in the detector. 969 return (trDataId[
"ccd"], int(trDataId[
'amp']))
971 def _setAmpDetector(self, item, dataId, trimmed=True):
972 """Set the detector object in an Exposure for an amplifier. 974 Defects are also added to the Exposure based on the detector object. 978 item : `lsst.afw.image.Exposure` 979 Exposure to set the detector in. 983 Should detector be marked as trimmed? (ignored) 988 def _setCcdDetector(self, item, dataId, trimmed=True):
989 """Set the detector object in an Exposure for a CCD. 993 item : `lsst.afw.image.Exposure` 994 Exposure to set the detector in. 998 Should detector be marked as trimmed? (ignored) 1000 if item.getDetector()
is not None:
1004 detector = self.
camera[detectorName]
1005 item.setDetector(detector)
1007 def _setFilter(self, mapping, item, dataId):
1008 """Set the filter object in an Exposure. If the Exposure had a FILTER 1009 keyword, this was already processed during load. But if it didn't, 1010 use the filter from the registry. 1014 mapping : `lsst.obs.base.Mapping` 1015 Where to get the filter from. 1016 item : `lsst.afw.image.Exposure` 1017 Exposure to set the filter in. 1022 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1023 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1026 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1029 actualId = mapping.need([
'filter'], dataId)
1030 filterName = actualId[
'filter']
1032 filterName = self.
filters[filterName]
1034 item.setFilter(afwImage.Filter(filterName))
1035 except pexExcept.NotFoundError:
1036 self.
log.warn(
"Filter %s not defined. Set to UNKNOWN." % (filterName))
1038 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1039 trimmed=True, setVisitInfo=True):
1040 """Default standardization function for images. 1042 This sets the Detector from the camera geometry 1043 and optionally set the Filter. In both cases this saves 1044 having to persist some data in each exposure (or image). 1048 mapping : `lsst.obs.base.Mapping` 1049 Where to get the values from. 1050 item : image-like object 1051 Can be any of lsst.afw.image.Exposure, 1052 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1053 or lsst.afw.image.MaskedImage 1058 Set filter? Ignored if item is already an exposure 1060 Should detector be marked as trimmed? 1061 setVisitInfo : `bool` 1062 Should Exposure have its VisitInfo filled out from the metadata? 1066 `lsst.afw.image.Exposure` 1067 The standardized Exposure. 1071 setVisitInfo=setVisitInfo)
1072 except Exception
as e:
1073 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1076 if mapping.level.lower() ==
"amp":
1078 elif mapping.level.lower() ==
"ccd":
1083 if exposure.getWcs()
is None and \
1084 (exposure.getInfo().getVisitInfo()
is not None or exposure.getMetadata().toDict() != {}):
1092 def _createSkyWcsFromMetadata(self, exposure):
1093 """Create a SkyWcs from the FITS header metadata in an Exposure. 1097 exposure : `lsst.afw.image.Exposure` 1098 The exposure to get metadata from, and attach the SkyWcs to. 1100 metadata = exposure.getMetadata()
1102 wcs = afwGeom.makeSkyWcs(metadata, strip=
True)
1103 exposure.setWcs(wcs)
1104 except pexExcept.TypeError
as e:
1106 self.
log.debug(
"wcs set to None; missing information found in metadata to create a valid wcs:" 1109 exposure.setMetadata(metadata)
1111 def _createInitialSkyWcs(self, exposure):
1112 """Create a SkyWcs from the boresight and camera geometry. 1114 If the boresight or camera geometry do not support this method of 1115 WCS creation, this falls back on the header metadata-based version 1116 (typically a purely linear FITS crval/crpix/cdmatrix WCS). 1120 exposure : `lsst.afw.image.Exposure` 1121 The exposure to get data from, and attach the SkyWcs to. 1126 if exposure.getInfo().getVisitInfo()
is None:
1127 msg =
"No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs." 1131 newSkyWcs =
createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
1132 exposure.setWcs(newSkyWcs)
1133 except InitialSkyWcsError
as e:
1134 msg =
"Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s" 1135 self.
log.warn(msg, e)
1136 self.
log.debug(
"Exception was: %s", traceback.TracebackException.from_exception(e))
1137 if e.__context__
is not None:
1138 self.
log.debug(
"Root-cause Exception was: %s",
1139 traceback.TracebackException.from_exception(e.__context__))
1141 def _makeCamera(self, policy, repositoryDir):
1142 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 1145 Also set self.cameraDataLocation, if relevant (else it can be left 1148 This implementation assumes that policy contains an entry "camera" 1149 that points to the subdirectory in this package of camera data; 1150 specifically, that subdirectory must contain: 1151 - a file named `camera.py` that contains persisted camera config 1152 - ampInfo table FITS files, as required by 1153 lsst.afw.cameraGeom.makeCameraFromPath 1157 policy : `lsst.daf.persistence.Policy` 1158 Policy with per-camera defaults already merged 1159 (PexPolicy only for backward compatibility). 1160 repositoryDir : `str` 1161 Policy repository for the subclassing module (obtained with 1162 getRepositoryPath() on the per-camera default dictionary). 1164 if 'camera' not in policy:
1165 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1166 cameraDataSubdir = policy[
'camera']
1168 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1169 cameraConfig = afwCameraGeom.CameraConfig()
1172 return afwCameraGeom.makeCameraFromPath(
1173 cameraConfig=cameraConfig,
1174 ampInfoPath=ampInfoPath,
1180 """Get the registry used by this mapper. 1185 The registry used by this mapper for this mapper's repository. 1190 """Stuff image compression settings into a daf.base.PropertySet 1192 This goes into the ButlerLocation's "additionalData", which gets 1193 passed into the boost::persistence framework. 1198 Type of dataset for which to get the image compression settings. 1204 additionalData : `lsst.daf.base.PropertySet` 1205 Image compression settings. 1207 mapping = self.
mappings[datasetType]
1208 recipeName = mapping.recipe
1209 storageType = mapping.storage
1211 return dafBase.PropertySet()
1213 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1214 (datasetType, storageType, recipeName))
1215 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1216 seed = hash(tuple(dataId.items())) % 2**31
1217 for plane
in (
"image",
"mask",
"variance"):
1218 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1219 recipe.set(plane +
".scaling.seed", seed)
1222 def _initWriteRecipes(self):
1223 """Read the recipes for writing files 1225 These recipes are currently used for configuring FITS compression, 1226 but they could have wider uses for configuring different flavors 1227 of the storage types. A recipe is referred to by a symbolic name, 1228 which has associated settings. These settings are stored as a 1229 `PropertySet` so they can easily be passed down to the 1230 boost::persistence framework as the "additionalData" parameter. 1232 The list of recipes is written in YAML. A default recipe and 1233 some other convenient recipes are in obs_base/policy/writeRecipes.yaml 1234 and these may be overridden or supplemented by the individual obs_* 1235 packages' own policy/writeRecipes.yaml files. 1237 Recipes are grouped by the storage type. Currently, only the 1238 ``FitsStorage`` storage type uses recipes, which uses it to 1239 configure FITS image compression. 1241 Each ``FitsStorage`` recipe for FITS compression should define 1242 "image", "mask" and "variance" entries, each of which may contain 1243 "compression" and "scaling" entries. Defaults will be provided for 1244 any missing elements under "compression" and "scaling". 1246 The allowed entries under "compression" are: 1248 * algorithm (string): compression algorithm to use 1249 * rows (int): number of rows per tile (0 = entire dimension) 1250 * columns (int): number of columns per tile (0 = entire dimension) 1251 * quantizeLevel (float): cfitsio quantization level 1253 The allowed entries under "scaling" are: 1255 * algorithm (string): scaling algorithm to use 1256 * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64) 1257 * fuzz (bool): fuzz the values when quantising floating-point values? 
1258 * seed (long): seed for random number generator when fuzzing 1259 * maskPlanes (list of string): mask planes to ignore when doing 1261 * quantizeLevel: divisor of the standard deviation for STDEV_* scaling 1262 * quantizePad: number of stdev to allow on the low side (for 1263 STDEV_POSITIVE/NEGATIVE) 1264 * bscale: manually specified BSCALE (for MANUAL scaling) 1265 * bzero: manually specified BSCALE (for MANUAL scaling) 1267 A very simple example YAML recipe: 1273 algorithm: GZIP_SHUFFLE 1277 recipesFile = os.path.join(
getPackageDir(
"obs_base"),
"policy",
"writeRecipes.yaml")
1278 recipes = dafPersist.Policy(recipesFile)
1279 supplementsFile = os.path.join(self.
getPackageDir(),
"policy",
"writeRecipes.yaml")
1280 validationMenu = {
'FitsStorage': validateRecipeFitsStorage, }
1281 if os.path.exists(supplementsFile)
and supplementsFile != recipesFile:
1282 supplements = dafPersist.Policy(supplementsFile)
1284 for entry
in validationMenu:
1285 intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1287 raise RuntimeError(
"Recipes provided in %s section %s may not override those in %s: %s" %
1288 (supplementsFile, entry, recipesFile, intersection))
1289 recipes.update(supplements)
1292 for storageType
in recipes.names(
True):
1293 if "default" not in recipes[storageType]:
1294 raise RuntimeError(
"No 'default' recipe defined for storage type %s in %s" %
1295 (storageType, recipesFile))
1296 self.
_writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object.

    If the image is a DecoratedImage then also set its metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set).

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier, forwarded to the mapper when computing the
        exposure ID for the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to build the VisitInfo from the image metadata; when
        `None`, a warning is logged and no VisitInfo is attached.
    logger : `lsst.log.Log`, optional
        Logger for the warning above; a default "CameraMapper" logger is
        created when not supplied.
    setVisitInfo : `bool`, optional
        If `True` (default), attempt to attach a VisitInfo to the result.

    Returns
    -------
    exposure : `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    # metadata must be initialized here: only two of the four branches
    # below produce metadata, and it is tested for None afterwards.
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Already an Exposure: reuse it (fixes use of `exposure` before
        # assignment in the original).
        exposure = image
        metadata = exposure.getMetadata()
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Attach a VisitInfo only when requested and none is already present.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage.

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` of `lsst.daf.base.PropertySet`
        Validated FitsStorage recipes, keyed by recipe name.

    Raises
    ------
    RuntimeError
        If validation fails.
    """
    # Default values for the "compression" section of each plane.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    # Default values for the "scaling" section of each plane.
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Raise RuntimeError if the entry contains unrecognised keywords."""
        unrecognized = set(entry.keys()) - set(allowed)
        # Only an error when something unknown is actually present
        # (the original raised unconditionally).
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)
            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent entirely: fill in all defaults.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(),
                                  name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema's type;
                    # fall back to the default when the key is absent.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _createInitialSkyWcs(self, exposure)
def _setCcdDetector(self, item, dataId, trimmed=True)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def _createSkyWcsFromMetadata(self, exposure)
def createInitialSkyWcs(visitInfo, detector, flipX=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
Utility functions.
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)