29 from deprecated.sphinx
import deprecated
31 from astro_metadata_translator
import fix_header
32 import lsst.daf.persistence
as dafPersist
33 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
34 import lsst.daf.base
as dafBase
35 import lsst.afw.geom
as afwGeom
36 import lsst.afw.image
as afwImage
37 import lsst.afw.table
as afwTable
38 from lsst.afw.fits
import readMetadata
39 import lsst.afw.cameraGeom
as afwCameraGeom
40 import lsst.log
as lsstLog
42 from .exposureIdInfo
import ExposureIdInfo
43 from .makeRawVisitInfo
import MakeRawVisitInfo
44 from .utils
import createInitialSkyWcs, InitialSkyWcsError
# Public API of this module: the mapper base class and the helper that
# wraps raw image-like objects into Exposures.
__all__ = ["CameraMapper", "exposureFromImage"]
52 """CameraMapper is a base class for mappers that handle images from a 53 camera and products derived from them. This provides an abstraction layer 54 between the data on disk and the code. 56 Public methods: keys, queryMetadata, getDatasetTypes, map, 57 canStandardize, standardize 59 Mappers for specific data sources (e.g., CFHT Megacam, LSST 60 simulations, etc.) should inherit this class. 62 The CameraMapper manages datasets within a "root" directory. Note that 63 writing to a dataset present in the input root will hide the existing 64 dataset but not overwrite it. See #2160 for design discussion. 66 A camera is assumed to consist of one or more rafts, each composed of 67 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 68 (amps). A camera is also assumed to have a camera geometry description 69 (CameraGeom object) as a policy file, a filter description (Filter class 70 static configuration) as another policy file. 72 Information from the camera geometry and defects are inserted into all 73 Exposure objects returned. 75 The mapper uses one or two registries to retrieve metadata about the 76 images. The first is a registry of all raw exposures. This must contain 77 the time of the observation. One or more tables (or the equivalent) 78 within the registry are used to look up data identifier components that 79 are not specified by the user (e.g. filter) and to return results for 80 metadata queries. The second is an optional registry of all calibration 81 data. This should contain validity start and end entries for each 82 calibration dataset in the same timescale as the observation time. 84 Subclasses will typically set MakeRawVisitInfoClass and optionally the 85 metadata translator class: 87 MakeRawVisitInfoClass: a class variable that points to a subclass of 88 MakeRawVisitInfo, a functor that creates an 89 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
91 translatorClass: The `~astro_metadata_translator.MetadataTranslator` 92 class to use for fixing metadata values. If it is not set an attempt 93 will be made to infer the class from ``MakeRawVisitInfoClass``, failing 94 that the metadata fixup will try to infer the translator class from the 97 Subclasses must provide the following methods: 99 _extractDetectorName(self, dataId): returns the detector name for a CCD 100 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 101 a dataset identifier referring to that CCD or a subcomponent of it. 103 _computeCcdExposureId(self, dataId): see below 105 _computeCoaddExposureId(self, dataId, singleFilter): see below 107 Subclasses may also need to override the following methods: 109 _transformId(self, dataId): transformation of a data identifier 110 from colloquial usage (e.g., "ccdname") to proper/actual usage 111 (e.g., "ccd"), including making suitable for path expansion (e.g. removing 112 commas). The default implementation does nothing. Note that this 113 method should not modify its input parameter. 115 getShortCcdName(self, ccdName): a static method that returns a shortened 116 name suitable for use as a filename. The default version converts spaces 119 _mapActualToPath(self, template, actualId): convert a template path to an 120 actual path, using the actual dataset identifier. 122 The mapper's behaviors are largely specified by the policy file. 123 See the MapperDictionary.paf for descriptions of the available items. 125 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 126 mappings (see Mappings class). 128 Common default mappings for all subclasses can be specified in the 129 "policy/{images,exposures,calibrations,datasets}.yaml" files. This 130 provides a simple way to add a product to all camera mappers. 
132 Functions to map (provide a path to the data given a dataset 133 identifier dictionary) and standardize (convert data into some standard 134 format or type) may be provided in the subclass as "map_{dataset type}" 135 and "std_{dataset type}", respectively. 137 If non-Exposure datasets cannot be retrieved using standard 138 daf_persistence methods alone, a "bypass_{dataset type}" function may be 139 provided in the subclass to return the dataset instead of using the 140 "datasets" subpolicy. 142 Implementations of map_camera and bypass_camera that should typically be 143 sufficient are provided in this base class. 149 Instead of auto-loading the camera at construction time, load it from 150 the calibration registry 154 policy : daf_persistence.Policy, 155 Policy with per-camera defaults already merged. 156 repositoryDir : string 157 Policy repository for the subclassing module (obtained with 158 getRepositoryPath() on the per-camera default dictionary). 159 root : string, optional 160 Path to the root directory for data. 161 registry : string, optional 162 Path to registry with data's metadata. 163 calibRoot : string, optional 164 Root directory for calibrations. 165 calibRegistry : string, optional 166 Path to registry with calibrations' metadata. 167 provided : list of string, optional 168 Keys provided by the mapper. 169 parentRegistry : Registry subclass, optional 170 Registry from a parent repository that may be used to look up 172 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 173 The configuration information for the repository this mapper is 180 MakeRawVisitInfoClass = MakeRawVisitInfo
183 PupilFactoryClass = afwCameraGeom.PupilFactory
186 translatorClass =
None 188 def __init__(self, policy, repositoryDir,
189 root=None, registry=None, calibRoot=None, calibRegistry=None,
190 provided=None, parentRegistry=None, repositoryCfg=None):
192 dafPersist.Mapper.__init__(self)
194 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
199 self.
root = repositoryCfg.root
203 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 204 if repoPolicy
is not None:
205 policy.update(repoPolicy)
209 if 'levels' in policy:
210 levelsPolicy = policy[
'levels']
211 for key
in levelsPolicy.names(
True):
212 self.
levels[key] = set(levelsPolicy.asArray(key))
215 if 'defaultSubLevels' in policy:
221 root = dafPersist.LogicalLocation(root).locString()
231 if calibRoot
is not None:
232 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
233 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
236 calibRoot = policy.get(
'calibRoot',
None)
238 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
240 if calibStorage
is None:
248 posixIfNoSql=(
not parentRegistry))
251 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
252 if needCalibRegistry:
255 "calibRegistryPath", calibStorage,
259 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
260 "calibRoot ivar:%s or policy['calibRoot']:%s" %
261 (calibRoot, policy.get(
'calibRoot',
None)))
281 raise ValueError(
'class variable packageName must not be None')
291 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
292 """Initialize mappings 294 For each of the dataset types that we want to be able to read, there 295 are methods that can be created to support them: 296 * map_<dataset> : determine the path for dataset 297 * std_<dataset> : standardize the retrieved dataset 298 * bypass_<dataset> : retrieve the dataset (bypassing the usual 300 * query_<dataset> : query the registry 302 Besides the dataset types explicitly listed in the policy, we create 303 additional, derived datasets for additional conveniences, 304 e.g., reading the header of an image, retrieving only the size of a 309 policy : `lsst.daf.persistence.Policy` 310 Policy with per-camera defaults already merged 311 rootStorage : `Storage subclass instance` 312 Interface to persisted repository data. 313 calibRoot : `Storage subclass instance` 314 Interface to persisted calib repository data 315 provided : `list` of `str` 316 Keys provided by the mapper 319 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
320 "obs_base",
"ImageMappingDefaults.yaml",
"policy"))
321 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
322 "obs_base",
"ExposureMappingDefaults.yaml",
"policy"))
323 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
324 "obs_base",
"CalibrationMappingDefaults.yaml",
"policy"))
325 dsMappingPolicy = dafPersist.Policy()
329 (
"images", imgMappingPolicy, ImageMapping),
330 (
"exposures", expMappingPolicy, ExposureMapping),
331 (
"calibrations", calMappingPolicy, CalibrationMapping),
332 (
"datasets", dsMappingPolicy, DatasetMapping)
335 for name, defPolicy, cls
in mappingList:
337 datasets = policy[name]
340 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
341 if os.path.exists(defaultsPath):
342 datasets.merge(dafPersist.Policy(defaultsPath))
345 setattr(self, name, mappings)
346 for datasetType
in datasets.names(
True):
347 subPolicy = datasets[datasetType]
348 subPolicy.merge(defPolicy)
350 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
351 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
352 subPolicy=subPolicy):
353 components = subPolicy.get(
'composite')
354 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 355 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 356 python = subPolicy[
'python']
357 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
358 disassembler=disassembler,
362 for name, component
in components.items():
363 butlerComposite.add(id=name,
364 datasetType=component.get(
'datasetType'),
365 setter=component.get(
'setter',
None),
366 getter=component.get(
'getter',
None),
367 subset=component.get(
'subset',
False),
368 inputOnly=component.get(
'inputOnly',
False))
369 return butlerComposite
370 setattr(self,
"map_" + datasetType, compositeClosure)
374 if name ==
"calibrations":
376 provided=provided, dataRoot=rootStorage)
378 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
381 raise ValueError(f
"Duplicate mapping policy for dataset type {datasetType}")
382 self.
keyDict.update(mapping.keys())
383 mappings[datasetType] = mapping
384 self.
mappings[datasetType] = mapping
385 if not hasattr(self,
"map_" + datasetType):
386 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
387 return mapping.map(mapper, dataId, write)
388 setattr(self,
"map_" + datasetType, mapClosure)
389 if not hasattr(self,
"query_" + datasetType):
390 def queryClosure(format, dataId, mapping=mapping):
391 return mapping.lookup(format, dataId)
392 setattr(self,
"query_" + datasetType, queryClosure)
393 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
394 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
395 return mapping.standardize(mapper, item, dataId)
396 setattr(self,
"std_" + datasetType, stdClosure)
398 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
399 """Set convenience methods on CameraMapper""" 400 mapName =
"map_" + datasetType +
"_" + suffix
401 bypassName =
"bypass_" + datasetType +
"_" + suffix
402 queryName =
"query_" + datasetType +
"_" + suffix
403 if not hasattr(self, mapName):
404 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
405 if not hasattr(self, bypassName):
406 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
407 bypassImpl = getattr(self,
"bypass_" + datasetType)
408 if bypassImpl
is not None:
409 setattr(self, bypassName, bypassImpl)
410 if not hasattr(self, queryName):
411 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
414 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
415 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
417 if subPolicy[
"storage"] ==
"FitsStorage":
418 def getMetadata(datasetType, pythonType, location, dataId):
419 md = readMetadata(location.getLocationsWithRoot()[0])
423 setMethods(
"md", bypassImpl=getMetadata)
426 addName =
"add_" + datasetType
427 if not hasattr(self, addName):
430 if name ==
"exposures":
431 def getSkyWcs(datasetType, pythonType, location, dataId):
432 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
433 return fitsReader.readWcs()
435 setMethods(
"wcs", bypassImpl=getSkyWcs)
437 def getPhotoCalib(datasetType, pythonType, location, dataId):
438 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
439 return fitsReader.readPhotoCalib()
441 setMethods(
"photoCalib", bypassImpl=getPhotoCalib)
443 def getVisitInfo(datasetType, pythonType, location, dataId):
444 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
445 return fitsReader.readVisitInfo()
447 setMethods(
"visitInfo", bypassImpl=getVisitInfo)
449 def getFilter(datasetType, pythonType, location, dataId):
450 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
451 return fitsReader.readFilter()
453 setMethods(
"filter", bypassImpl=getFilter)
455 setMethods(
"detector",
456 mapImpl=
lambda dataId, write=
False:
457 dafPersist.ButlerLocation(
458 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
460 storageName=
"Internal",
461 locationList=
"ignored",
466 bypassImpl=
lambda datasetType, pythonType, location, dataId:
470 def getBBox(datasetType, pythonType, location, dataId):
471 md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
473 return afwImage.bboxFromMetadata(md)
475 setMethods(
"bbox", bypassImpl=getBBox)
477 elif name ==
"images":
478 def getBBox(datasetType, pythonType, location, dataId):
479 md = readMetadata(location.getLocationsWithRoot()[0])
481 return afwImage.bboxFromMetadata(md)
482 setMethods(
"bbox", bypassImpl=getBBox)
484 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
486 def getMetadata(datasetType, pythonType, location, dataId):
487 md = readMetadata(os.path.join(location.getStorage().root,
488 location.getLocations()[0]), hdu=1)
492 setMethods(
"md", bypassImpl=getMetadata)
495 if subPolicy[
"storage"] ==
"FitsStorage":
496 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
497 subId = dataId.copy()
499 loc = mapping.map(mapper, subId, write)
500 bbox = dataId[
'bbox']
501 llcX = bbox.getMinX()
502 llcY = bbox.getMinY()
503 width = bbox.getWidth()
504 height = bbox.getHeight()
505 loc.additionalData.set(
'llcX', llcX)
506 loc.additionalData.set(
'llcY', llcY)
507 loc.additionalData.set(
'width', width)
508 loc.additionalData.set(
'height', height)
509 if 'imageOrigin' in dataId:
510 loc.additionalData.set(
'imageOrigin',
511 dataId[
'imageOrigin'])
514 def querySubClosure(key, format, dataId, mapping=mapping):
515 subId = dataId.copy()
517 return mapping.lookup(format, subId)
518 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
520 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
523 def getLen(datasetType, pythonType, location, dataId):
524 md = readMetadata(os.path.join(location.getStorage().root,
525 location.getLocations()[0]), hdu=1)
529 setMethods(
"len", bypassImpl=getLen)
532 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
533 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
534 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
535 location.getLocations()[0])))
537 def _computeCcdExposureId(self, dataId):
538 """Compute the 64-bit (long) identifier for a CCD exposure. 540 Subclasses must override 545 Data identifier with visit, ccd. 547 raise NotImplementedError()
549 def _computeCoaddExposureId(self, dataId, singleFilter):
550 """Compute the 64-bit (long) identifier for a coadd. 552 Subclasses must override 557 Data identifier with tract and patch. 558 singleFilter : `bool` 559 True means the desired ID is for a single-filter coadd, in which 560 case dataIdmust contain filter. 562 raise NotImplementedError()
564 def _search(self, path):
565 """Search for path in the associated repository's storage. 570 Path that describes an object in the repository associated with 572 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 573 indicator will be stripped when searching and so will match 574 filenames without the HDU indicator, e.g. 'foo.fits'. The path 575 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 580 The path for this object in the repository. Will return None if the 581 object can't be found. If the input argument path contained an HDU 582 indicator, the returned path will also contain the HDU indicator. 587 """Rename any existing object with the given type and dataId. 589 The CameraMapper implementation saves objects in a sequence of e.g.: 595 All of the backups will be placed in the output repo, however, and will 596 not be removed if they are found elsewhere in the _parent chain. This 597 means that the same file will be stored twice if the previous version 598 was found in an input repo. 607 def firstElement(list):
608 """Get the first element in the list, or None if that can't be 611 return list[0]
if list
is not None and len(list)
else None 614 newLocation = self.map(datasetType, dataId, write=
True)
615 newPath = newLocation.getLocations()[0]
616 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
617 path = firstElement(path)
619 while path
is not None:
621 oldPaths.append((n, path))
622 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
623 path = firstElement(path)
624 for n, oldPath
in reversed(oldPaths):
625 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
628 """Return supported keys. 633 List of keys usable in a dataset identifier 638 """Return a dict of supported keys and their value types for a given 639 dataset type at a given level of the key hierarchy. 644 Dataset type or None for all dataset types. 645 level : `str` or None 646 Level or None for all levels or '' for the default level for the 652 Keys are strings usable in a dataset identifier, values are their 660 if datasetType
is None:
661 keyDict = copy.copy(self.
keyDict)
664 if level
is not None and level
in self.
levels:
665 keyDict = copy.copy(keyDict)
666 for l
in self.
levels[level]:
681 """Return the name of the camera that this CameraMapper is for.""" 683 className = className[className.find(
'.'):-1]
684 m = re.search(
r'(\w+)Mapper', className)
686 m = re.search(
r"class '[\w.]*?(\w+)'", className)
688 return name[:1].lower() + name[1:]
if name
else '' 692 """Return the name of the package containing this CameraMapper.""" 694 raise ValueError(
'class variable packageName must not be None')
699 """Return the base directory of this package""" 703 """Map a camera dataset.""" 705 raise RuntimeError(
"No camera dataset available.")
707 return dafPersist.ButlerLocation(
708 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
710 storageName=
"ConfigStorage",
718 """Return the (preloaded) camera object. 721 raise RuntimeError(
"No camera dataset available.")
725 return dafPersist.ButlerLocation(
726 pythonType=
"lsst.obs.base.ExposureIdInfo",
728 storageName=
"Internal",
729 locationList=
"ignored",
736 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 737 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
738 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
742 """Disable standardization for bfKernel 744 bfKernel is a calibration product that is numpy array, 745 unlike other calibration products that are all images; 746 all calibration images are sent through _standardizeExposure 747 due to CalibrationMapping, but we don't want that to happen to bfKernel 752 """Standardize a raw dataset by converting it to an Exposure instead 755 trimmed=
False, setVisitInfo=
True)
758 """Map a sky policy.""" 759 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
760 "Internal",
None,
None, self,
764 """Standardize a sky policy by returning the one we use.""" 765 return self.skypolicy
773 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
775 """Set up a registry (usually SQLite3), trying a number of possible 783 Description of registry (for log messages) 787 Policy that contains the registry name, used if path is None. 789 Key in policy for registry path. 790 storage : Storage subclass 791 Repository Storage to look in. 792 searchParents : bool, optional 793 True if the search for a registry should follow any Butler v1 795 posixIfNoSql : bool, optional 796 If an sqlite registry is not found, will create a posix registry if 801 lsst.daf.persistence.Registry 804 if path
is None and policyKey
in policy:
805 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
806 if os.path.isabs(path):
807 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
808 if not storage.exists(path):
809 newPath = storage.instanceSearch(path)
811 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 813 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
817 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
825 if path
and (path.startswith(root)):
826 path = path[len(root +
'/'):]
827 except AttributeError:
833 def search(filename, description):
834 """Search for file in storage 839 Filename to search for 841 Description of file, for error message. 845 path : `str` or `None` 846 Path to file, or None 848 result = storage.instanceSearch(filename)
851 self.
log.debug(
"Unable to locate %s: %s", description, filename)
856 path = search(
"%s.pgsql" % name,
"%s in root" % description)
858 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
860 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
863 if not storage.exists(path):
864 newPath = storage.instanceSearch(path)
865 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 866 if newPath
is not None:
868 localFileObj = storage.getLocalFile(path)
869 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
870 registry = dafPersist.Registry.create(localFileObj.name)
872 elif not registry
and posixIfNoSql:
874 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
875 registry = dafPersist.PosixRegistry(storage.root)
881 def _transformId(self, dataId):
882 """Generate a standard ID dict from a camera-specific ID dict. 884 Canonical keys include: 885 - amp: amplifier name 886 - ccd: CCD name (in LSST this is a combination of raft and sensor) 887 The default implementation returns a copy of its input. 892 Dataset identifier; this must not be modified 897 Transformed dataset identifier. 902 def _mapActualToPath(self, template, actualId):
903 """Convert a template path to an actual path, using the actual data 904 identifier. This implementation is usually sufficient but can be 905 overridden by the subclass. 922 return template % transformedId
923 except Exception
as e:
924 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
928 """Convert a CCD name to a form useful as a filename 930 The default implementation converts spaces to underscores. 932 return ccdName.replace(
" ",
"_")
def _extractDetectorName(self, dataId):
    """Extract the detector (CCD) name from the dataset identifier.

    The name in question is the detector name used by lsst.afw.cameraGeom.

    Subclasses must override this method.

    Parameters
    ----------
    dataId : `dict`
        Dataset identifier referring to a CCD (or a subcomponent of one).

    Returns
    -------
    `str`
        Detector name as used by lsst.afw.cameraGeom (in subclasses).

    Raises
    ------
    NotImplementedError
        Always; subclasses must provide an implementation.
    """
    raise NotImplementedError("No _extractDetectorName() function specified")
951 @deprecated(
"This method is no longer used for ISR (will be removed after v11)", category=FutureWarning)
952 def _extractAmpId(self, dataId):
953 """Extract the amplifier identifer from a dataset identifier. 955 .. note:: Deprecated in 11_0 957 amplifier identifier has two parts: the detector name for the CCD 958 containing the amplifier and index of the amplifier in the detector. 972 return (trDataId[
"ccd"], int(trDataId[
'amp']))
974 def _setAmpDetector(self, item, dataId, trimmed=True):
975 """Set the detector object in an Exposure for an amplifier. 977 Defects are also added to the Exposure based on the detector object. 981 item : `lsst.afw.image.Exposure` 982 Exposure to set the detector in. 986 Should detector be marked as trimmed? (ignored) 991 def _setCcdDetector(self, item, dataId, trimmed=True):
992 """Set the detector object in an Exposure for a CCD. 996 item : `lsst.afw.image.Exposure` 997 Exposure to set the detector in. 1001 Should detector be marked as trimmed? (ignored) 1003 if item.getDetector()
is not None:
1007 detector = self.
camera[detectorName]
1008 item.setDetector(detector)
1010 def _setFilter(self, mapping, item, dataId):
1011 """Set the filter object in an Exposure. If the Exposure had a FILTER 1012 keyword, this was already processed during load. But if it didn't, 1013 use the filter from the registry. 1017 mapping : `lsst.obs.base.Mapping` 1018 Where to get the filter from. 1019 item : `lsst.afw.image.Exposure` 1020 Exposure to set the filter in. 1025 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1026 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1029 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1032 actualId = mapping.need([
'filter'], dataId)
1033 filterName = actualId[
'filter']
1035 filterName = self.
filters[filterName]
1037 item.setFilter(afwImage.Filter(filterName))
1038 except pexExcept.NotFoundError:
1039 self.
log.warn(
"Filter %s not defined. Set to UNKNOWN." % (filterName))
1041 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1042 trimmed=True, setVisitInfo=True):
1043 """Default standardization function for images. 1045 This sets the Detector from the camera geometry 1046 and optionally set the Filter. In both cases this saves 1047 having to persist some data in each exposure (or image). 1051 mapping : `lsst.obs.base.Mapping` 1052 Where to get the values from. 1053 item : image-like object 1054 Can be any of lsst.afw.image.Exposure, 1055 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1056 or lsst.afw.image.MaskedImage 1061 Set filter? Ignored if item is already an exposure 1063 Should detector be marked as trimmed? 1064 setVisitInfo : `bool` 1065 Should Exposure have its VisitInfo filled out from the metadata? 1069 `lsst.afw.image.Exposure` 1070 The standardized Exposure. 1074 setVisitInfo=setVisitInfo)
1075 except Exception
as e:
1076 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1079 if mapping.level.lower() ==
"amp":
1081 elif mapping.level.lower() ==
"ccd":
1087 if mapping.level.lower() !=
"amp" and exposure.getWcs()
is None and \
1088 (exposure.getInfo().getVisitInfo()
is not None or exposure.getMetadata().toDict()):
1096 def _createSkyWcsFromMetadata(self, exposure):
1097 """Create a SkyWcs from the FITS header metadata in an Exposure. 1101 exposure : `lsst.afw.image.Exposure` 1102 The exposure to get metadata from, and attach the SkyWcs to. 1104 metadata = exposure.getMetadata()
1106 wcs = afwGeom.makeSkyWcs(metadata, strip=
True)
1107 exposure.setWcs(wcs)
1108 except pexExcept.TypeError
as e:
1110 self.
log.debug(
"wcs set to None; missing information found in metadata to create a valid wcs:" 1113 exposure.setMetadata(metadata)
1115 def _createInitialSkyWcs(self, exposure):
1116 """Create a SkyWcs from the boresight and camera geometry. 1118 If the boresight or camera geometry do not support this method of 1119 WCS creation, this falls back on the header metadata-based version 1120 (typically a purely linear FITS crval/crpix/cdmatrix WCS). 1124 exposure : `lsst.afw.image.Exposure` 1125 The exposure to get data from, and attach the SkyWcs to. 1130 if exposure.getInfo().getVisitInfo()
is None:
1131 msg =
"No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs." 1135 newSkyWcs =
createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
1136 exposure.setWcs(newSkyWcs)
1137 except InitialSkyWcsError
as e:
1138 msg =
"Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s" 1139 self.
log.warn(msg, e)
1140 self.
log.debug(
"Exception was: %s", traceback.TracebackException.from_exception(e))
1141 if e.__context__
is not None:
1142 self.
log.debug(
"Root-cause Exception was: %s",
1143 traceback.TracebackException.from_exception(e.__context__))
1145 def _makeCamera(self, policy, repositoryDir):
1146 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 1149 Also set self.cameraDataLocation, if relevant (else it can be left 1152 This implementation assumes that policy contains an entry "camera" 1153 that points to the subdirectory in this package of camera data; 1154 specifically, that subdirectory must contain: 1155 - a file named `camera.py` that contains persisted camera config 1156 - ampInfo table FITS files, as required by 1157 lsst.afw.cameraGeom.makeCameraFromPath 1161 policy : `lsst.daf.persistence.Policy` 1162 Policy with per-camera defaults already merged 1163 (PexPolicy only for backward compatibility). 1164 repositoryDir : `str` 1165 Policy repository for the subclassing module (obtained with 1166 getRepositoryPath() on the per-camera default dictionary). 1168 if 'camera' not in policy:
1169 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1170 cameraDataSubdir = policy[
'camera']
1172 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1173 cameraConfig = afwCameraGeom.CameraConfig()
1176 return afwCameraGeom.makeCameraFromPath(
1177 cameraConfig=cameraConfig,
1178 ampInfoPath=ampInfoPath,
1184 """Get the registry used by this mapper. 1189 The registry used by this mapper for this mapper's repository. 1194 """Stuff image compression settings into a daf.base.PropertySet 1196 This goes into the ButlerLocation's "additionalData", which gets 1197 passed into the boost::persistence framework. 1202 Type of dataset for which to get the image compression settings. 1208 additionalData : `lsst.daf.base.PropertySet` 1209 Image compression settings. 1211 mapping = self.
mappings[datasetType]
1212 recipeName = mapping.recipe
1213 storageType = mapping.storage
1215 return dafBase.PropertySet()
1217 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1218 (datasetType, storageType, recipeName))
1219 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1220 seed = hash(tuple(dataId.items())) % 2**31
1221 for plane
in (
"image",
"mask",
"variance"):
1222 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1223 recipe.set(plane +
".scaling.seed", seed)
def _initWriteRecipes(self):
    """Read the recipes for writing files.

    These recipes are currently used for configuring FITS compression,
    but they could have wider uses for configuring different flavors
    of the storage types. A recipe is referred to by a symbolic name,
    which has associated settings. These settings are stored as a
    `PropertySet` so they can easily be passed down to the
    boost::persistence framework as the "additionalData" parameter.

    The list of recipes is written in YAML. A default recipe and
    some other convenient recipes are in obs_base/policy/writeRecipes.yaml
    and these may be overridden or supplemented by the individual obs_*
    packages' own policy/writeRecipes.yaml files.

    Recipes are grouped by the storage type. Currently, only the
    ``FitsStorage`` storage type uses recipes, which uses it to
    configure FITS image compression.

    Each ``FitsStorage`` recipe for FITS compression should define
    "image", "mask" and "variance" entries, each of which may contain
    "compression" and "scaling" entries. Defaults will be provided for
    any missing elements under "compression" and "scaling".

    The allowed entries under "compression" are:

    * algorithm (string): compression algorithm to use
    * rows (int): number of rows per tile (0 = entire dimension)
    * columns (int): number of columns per tile (0 = entire dimension)
    * quantizeLevel (float): cfitsio quantization level

    The allowed entries under "scaling" are:

    * algorithm (string): scaling algorithm to use
    * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
    * fuzz (bool): fuzz the values when quantising floating-point values?
    * seed (long): seed for random number generator when fuzzing
    * maskPlanes (list of string): mask planes to ignore when doing
      statistics
    * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
    * quantizePad: number of stdev to allow on the low side (for
      STDEV_POSITIVE/NEGATIVE)
    * bscale: manually specified BSCALE (for MANUAL scaling)
    * bzero: manually specified BSCALE (for MANUAL scaling)
    """
    # Base recipes always come from obs_base; the concrete mapper
    # package may supplement (but never override) them.
    recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
    recipes = dafPersist.Policy(recipesFile)
    supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
    validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
    if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
        supplements = dafPersist.Policy(supplementsFile)
        # Don't allow overrides, only supplements: any recipe name shared
        # with the base file is an error.
        for entry in validationMenu:
            intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
            if intersection:
                raise RuntimeError(
                    "Recipes provided in %s section %s may not override those in %s: %s" %
                    (supplementsFile, entry, recipesFile, intersection))
        recipes.update(supplements)

    # Validate each storage type's recipes and cache the result.
    self._writeRecipes = {}
    for storageType in recipes.names(True):
        if "default" not in recipes[storageType]:
            raise RuntimeError(
                "No 'default' recipe defined for storage type %s in %s" %
                (storageType, recipesFile))
        self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object.

    If the image is a DecoratedImage then also set its metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set).

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier; used with ``mapper`` to compute the exposure ID
        for the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to build the VisitInfo from FITS metadata.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default "CameraMapper" logger is created
        when needed.
    setVisitInfo : `bool`, optional
        If `True`, attach a VisitInfo when one can be constructed.

    Returns
    -------
    exposure : `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Already an Exposure: reuse it and its metadata as-is.
        exposure = image
        metadata = exposure.getMetadata()
    else:  # Image-like
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Set VisitInfo if we can; building one requires both FITS metadata
    # and a mapper to interpret it.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)
    return exposure
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage.

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` of `lsst.daf.base.PropertySet`
        Validated FitsStorage recipes, keyed by recipe name.

    Raises
    ------
    RuntimeError
        If validation fails.
    """
    # Defaults applied for any missing "compression"/"scaling" entries.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # No explicit settings: fill in pure defaults.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(),
                                  name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce provided values to the schema's type; fall
                    # back to the default when the key is absent.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _createInitialSkyWcs(self, exposure)
def _setCcdDetector(self, item, dataId, trimmed=True)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def _createSkyWcsFromMetadata(self, exposure)
def createInitialSkyWcs(visitInfo, detector, flipX=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
Utility functions.
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)