import copy
import os
import re
import traceback
import weakref

from astro_metadata_translator import fix_header
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from lsst.utils import getPackageDir

from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from ._instrument import Instrument
__all__ = ["CameraMapper", "exposureFromImage"]
51 """CameraMapper is a base class for mappers that handle images from a
52 camera and products derived from them. This provides an abstraction layer
53 between the data on disk and the code.
55 Public methods: keys, queryMetadata, getDatasetTypes, map,
56 canStandardize, standardize
58 Mappers for specific data sources (e.g., CFHT Megacam, LSST
59 simulations, etc.) should inherit this class.
61 The CameraMapper manages datasets within a "root" directory. Note that
62 writing to a dataset present in the input root will hide the existing
63 dataset but not overwrite it. See #2160 for design discussion.
65 A camera is assumed to consist of one or more rafts, each composed of
66 multiple CCDs. Each CCD is in turn composed of one or more amplifiers
67 (amps). A camera is also assumed to have a camera geometry description
68 (CameraGeom object) as a policy file, a filter description (Filter class
69 static configuration) as another policy file.
71 Information from the camera geometry and defects are inserted into all
72 Exposure objects returned.
74 The mapper uses one or two registries to retrieve metadata about the
75 images. The first is a registry of all raw exposures. This must contain
76 the time of the observation. One or more tables (or the equivalent)
77 within the registry are used to look up data identifier components that
78 are not specified by the user (e.g. filter) and to return results for
79 metadata queries. The second is an optional registry of all calibration
80 data. This should contain validity start and end entries for each
81 calibration dataset in the same timescale as the observation time.
83 Subclasses will typically set MakeRawVisitInfoClass and optionally the
84 metadata translator class:
86 MakeRawVisitInfoClass: a class variable that points to a subclass of
87 MakeRawVisitInfo, a functor that creates an
88 lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
90 translatorClass: The `~astro_metadata_translator.MetadataTranslator`
91 class to use for fixing metadata values. If it is not set an attempt
92 will be made to infer the class from ``MakeRawVisitInfoClass``, failing
93 that the metadata fixup will try to infer the translator class from the
96 Subclasses must provide the following methods:
98 _extractDetectorName(self, dataId): returns the detector name for a CCD
99 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
100 a dataset identifier referring to that CCD or a subcomponent of it.
102 _computeCcdExposureId(self, dataId): see below
104 _computeCoaddExposureId(self, dataId, singleFilter): see below
106 Subclasses may also need to override the following methods:
108 _transformId(self, dataId): transformation of a data identifier
109 from colloquial usage (e.g., "ccdname") to proper/actual usage
110 (e.g., "ccd"), including making suitable for path expansion (e.g. removing
111 commas). The default implementation does nothing. Note that this
112 method should not modify its input parameter.
114 getShortCcdName(self, ccdName): a static method that returns a shortened
115 name suitable for use as a filename. The default version converts spaces
118 _mapActualToPath(self, template, actualId): convert a template path to an
119 actual path, using the actual dataset identifier.
121 The mapper's behaviors are largely specified by the policy file.
122 See the MapperDictionary.paf for descriptions of the available items.
124 The 'exposures', 'calibrations', and 'datasets' subpolicies configure
125 mappings (see Mappings class).
127 Common default mappings for all subclasses can be specified in the
128 "policy/{images,exposures,calibrations,datasets}.yaml" files. This
129 provides a simple way to add a product to all camera mappers.
131 Functions to map (provide a path to the data given a dataset
132 identifier dictionary) and standardize (convert data into some standard
133 format or type) may be provided in the subclass as "map_{dataset type}"
134 and "std_{dataset type}", respectively.
136 If non-Exposure datasets cannot be retrieved using standard
137 daf_persistence methods alone, a "bypass_{dataset type}" function may be
138 provided in the subclass to return the dataset instead of using the
139 "datasets" subpolicy.
141 Implementations of map_camera and bypass_camera that should typically be
142 sufficient are provided in this base class.
148 Instead of auto-loading the camera at construction time, load it from
149 the calibration registry
153 policy : daf_persistence.Policy,
154 Policy with per-camera defaults already merged.
155 repositoryDir : string
156 Policy repository for the subclassing module (obtained with
157 getRepositoryPath() on the per-camera default dictionary).
158 root : string, optional
159 Path to the root directory for data.
160 registry : string, optional
161 Path to registry with data's metadata.
162 calibRoot : string, optional
163 Root directory for calibrations.
164 calibRegistry : string, optional
165 Path to registry with calibrations' metadata.
166 provided : list of string, optional
167 Keys provided by the mapper.
168 parentRegistry : Registry subclass, optional
169 Registry from a parent repository that may be used to look up
171 repositoryCfg : daf_persistence.RepositoryCfg or None, optional
172 The configuration information for the repository this mapper is
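    # A minimal sketch of a concrete subclass (the names below are
    # hypothetical, not part of obs_base):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #
    #         def _extractDetectorName(self, dataId):
    #             return "R:%(raft)s S:%(sensor)s" % dataId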
    MakeRawVisitInfoClass = MakeRawVisitInfo

    PupilFactoryClass = afwCameraGeom.PupilFactory

    translatorClass = None

    _gen3instrument = None
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        self.root = repositoryCfg.root

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")

        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')
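    # Typical access is indirect, through the gen2 Butler; a sketch (the repo
    # path and data id below are hypothetical):
    #
    #     import lsst.daf.persistence as dafPersist
    #     butler = dafPersist.Butler(root="/path/to/repo")
    #     exp = butler.get("raw", visit=1, ccd=2)  # dispatches via map_raw etc.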
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings.

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
        """
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)
                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    if subPolicy["storage"] == "FitsStorage":
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)
                        if name == "exposures":
                            def getSkyWcs(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readWcs()

                            setMethods("wcs", bypassImpl=getSkyWcs)

                            def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                                """Create a SkyWcs from the un-modified raw FITS WCS header keys."""
                                if datasetType[:3] != "raw":
                                    raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                               datasetType, dataId)
                                return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                            setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                            def getPhotoCalib(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readPhotoCalib()

                            setMethods("photoCalib", bypassImpl=getPhotoCalib)

                            def getVisitInfo(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readVisitInfo()

                            setMethods("visitInfo", bypassImpl=getVisitInfo)

                            def getFilter(datasetType, pythonType, location, dataId):
                                fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                                return fitsReader.readFilter()

                            setMethods("filter", bypassImpl=getFilter)

                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )

                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)

                            setMethods("bbox", bypassImpl=getBBox)

                        elif name == "images":
                            def getBBox(datasetType, pythonType, location, dataId):
                                md = readMetadata(location.getLocationsWithRoot()[0])
                                fix_header(md, translator_class=self.translatorClass)
                                return afwImage.bboxFromMetadata(md)
                            setMethods("bbox", bypassImpl=getBBox)
                    if subPolicy["storage"] == "FitsCatalogStorage":
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
                    if subPolicy["storage"] == "FitsCatalogStorage":
                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()
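    # A sketch of a possible subclass override (the packing scheme below is
    # hypothetical, not part of obs_base):
    #
    #     def _computeCcdExposureId(self, dataId):
    #         # e.g. 8 bits for the CCD, the rest for the visit
    #         return int(dataId["visit"])*256 + int(dataId["ccd"])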
    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain filter.
        """
        raise NotImplementedError()
    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if the
            object can't be found. If the input argument path contained an HDU
            indicator, the returned path will also contain the HDU indicator.
        """
        return self.rootStorage.search(path)
598 """Rename any existing object with the given type and dataId.
600 The CameraMapper implementation saves objects in a sequence of e.g.:
606 All of the backups will be placed in the output repo, however, and will
607 not be removed if they are found elsewhere in the _parent chain. This
608 means that the same file will be stored twice if the previous version
609 was found in an input repo.
618 def firstElement(list):
619 """Get the first element in the list, or None if that can't be
622 return list[0]
if list
is not None and len(list)
else None
625 newLocation = self.map(datasetType, dataId, write=
True)
626 newPath = newLocation.getLocations()[0]
627 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
628 path = firstElement(path)
630 while path
is not None:
632 oldPaths.append((n, path))
633 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
634 path = firstElement(path)
635 for n, oldPath
in reversed(oldPaths):
636 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
639 """Return supported keys.
644 List of keys usable in a dataset identifier
649 """Return a dict of supported keys and their value types for a given
650 dataset type at a given level of the key hierarchy.
655 Dataset type or None for all dataset types.
656 level : `str` or None
657 Level or None for all levels or '' for the default level for the
663 Keys are strings usable in a dataset identifier, values are their
671 if datasetType
is None:
672 keyDict = copy.copy(self.
keyDict)
675 if level
is not None and level
in self.
levels:
676 keyDict = copy.copy(keyDict)
677 for lev
in self.
levels[level]:
692 """Return the name of the camera that this CameraMapper is for."""
694 className = className[className.find(
'.'):-1]
695 m = re.search(
r'(\w+)Mapper', className)
697 m = re.search(
r"class '[\w.]*?(\w+)'", className)
699 return name[:1].lower() + name[1:]
if name
else ''
703 """Return the name of the package containing this CameraMapper."""
705 raise ValueError(
'class variable packageName must not be None')
710 """Return the gen3 Instrument class equivalent for this gen2 Mapper.
715 A `~lsst.obs.base.Instrument` class.
718 raise NotImplementedError(
"Please provide a specific implementation for your instrument"
719 " to enable conversion of this gen2 repository to gen3")
724 raise ValueError(f
"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
725 " but that is not an lsst.obs.base.Instrument")
730 """Return the base directory of this package"""
734 """Map a camera dataset."""
736 raise RuntimeError(
"No camera dataset available.")
738 return dafPersist.ButlerLocation(
739 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
741 storageName=
"ConfigStorage",
749 """Return the (preloaded) camera object.
752 raise RuntimeError(
"No camera dataset available.")
    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)
773 """Disable standardization for bfKernel
775 bfKernel is a calibration product that is numpy array,
776 unlike other calibration products that are all images;
777 all calibration images are sent through _standardizeExposure
778 due to CalibrationMapping, but we don't want that to happen to bfKernel
783 """Standardize a raw dataset by converting it to an Exposure instead
786 trimmed=
False, setVisitInfo=
True)
789 """Map a sky policy."""
790 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
791 "Internal",
None,
None, self,
795 """Standardize a sky policy by returning the one we use."""
796 return self.skypolicy
    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : string
            Name of registry.
        description : string
            Description of registry (for log messages)
        path : string
            Path for registry.
        policy : string
            Policy that contains the registry name, used if path is None.
        policyKey : string
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry if
            this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
        elif path is not None and not storage.exists(path):
            self.log.warn("Unable to locate registry at policy path: %s", path)
            path = None
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass
        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)
        registry = None
        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            self.log.info("Loading Posix %s registry from %s", description, storage.root)
            registry = dafPersist.PosixRegistry(storage.root)

        return registry
    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
        """
        return dataId.copy()
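    # A sketch of a subclass override translating colloquial keys to actual
    # ones (the key names are hypothetical):
    #
    #     def _transformId(self, dataId):
    #         actualId = dataId.copy()
    #         if "ccdname" in actualId:
    #             actualId["ccd"] = actualId.pop("ccdname").replace(",", "")
    #         return actualId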
    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        Parameters
        ----------
        template : `str`
            Template path
        actualId : `dict`
            Dataset identifier

        Returns
        -------
        `str`
            Pathname
        """
        try:
            transformedId = self._transformId(actualId)
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
959 """Convert a CCD name to a form useful as a filename
961 The default implementation converts spaces to underscores.
963 return ccdName.replace(
" ",
"_")
    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")
    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)
    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        try:
            item.setFilter(afwImage.Filter(filterName))
        except pexExcept.NotFoundError:
            self.log.warn("Filter %s not defined. Set to UNKNOWN." % (filterName))
    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # Only create a WCS if the exposure doesn't already have one and we
        # have either a VisitInfo or exposure metadata, and never for amps.
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, item, dataId)

        return exposure
    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e.args[0])
        # ensure any WCS values stripped from the metadata are removed in the exposure
        exposure.setMetadata(metadata)
    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))
    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )
1193 """Get the registry used by this mapper.
1198 The registry used by this mapper for this mapper's repository.
1203 """Stuff image compression settings into a daf.base.PropertySet
1205 This goes into the ButlerLocation's "additionalData", which gets
1206 passed into the boost::persistence framework.
1211 Type of dataset for which to get the image compression settings.
1217 additionalData : `lsst.daf.base.PropertySet`
1218 Image compression settings.
1220 mapping = self.
mappings[datasetType]
1221 recipeName = mapping.recipe
1222 storageType = mapping.storage
1224 return dafBase.PropertySet()
1226 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1227 (datasetType, storageType, recipeName))
1228 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1229 seed = hash(tuple(dataId.items())) % 2**31
1230 for plane
in (
"image",
"mask",
"variance"):
1231 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1232 recipe.set(plane +
".scaling.seed", seed)
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1313 """Generate an Exposure from an image-like object
1315 If the image is a DecoratedImage then also set its WCS and metadata
1316 (Image and MaskedImage are missing the necessary metadata
1317 and Exposure already has those set)
1321 image : Image-like object
1322 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1327 `lsst.afw.image.Exposure`
1328 Exposure containing input image.
1330 translatorClass =
None
1331 if mapper
is not None:
1332 translatorClass = mapper.translatorClass
1335 if isinstance(image, afwImage.MaskedImage):
1336 exposure = afwImage.makeExposure(image)
1337 elif isinstance(image, afwImage.DecoratedImage):
1338 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1339 metadata = image.getMetadata()
1340 fix_header(metadata, translator_class=translatorClass)
1341 exposure.setMetadata(metadata)
1342 elif isinstance(image, afwImage.Exposure):
1344 metadata = exposure.getMetadata()
1345 fix_header(metadata, translator_class=translatorClass)
1347 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1350 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1351 if metadata
is not None:
1354 logger = lsstLog.Log.getLogger(
"CameraMapper")
1355 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1357 exposureId = mapper._computeCcdExposureId(dataId)
1358 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1360 exposure.getInfo().setVisitInfo(visitInfo)
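# A minimal usage sketch (the FITS path below is hypothetical; with no mapper
# the VisitInfo cannot be set):
#
#     import lsst.afw.image as afwImage
#     decoratedImage = afwImage.DecoratedImageF("raw.fits")
#     exposure = exposureFromImage(decoratedImage, setVisitInfo=False)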
1366 """Validate recipes for FitsStorage
1368 The recipes are supplemented with default values where appropriate.
1370 TODO: replace this custom validation code with Cerberus (DM-11846)
1374 recipes : `lsst.daf.persistence.Policy`
1375 FitsStorage recipes to validate.
1379 validated : `lsst.daf.base.PropertySet`
1380 Validated FitsStorage recipe.
1385 If validation fails.
1389 compressionSchema = {
1390 "algorithm":
"NONE",
1393 "quantizeLevel": 0.0,
1396 "algorithm":
"NONE",
1398 "maskPlanes": [
"NO_DATA"],
1400 "quantizeLevel": 4.0,
1407 def checkUnrecognized(entry, allowed, description):
1408 """Check to see if the entry contains unrecognised keywords"""
1409 unrecognized = set(entry.keys()) - set(allowed)
1412 "Unrecognized entries when parsing image compression recipe %s: %s" %
1413 (description, unrecognized))
1416 for name
in recipes.names(
True):
1417 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1418 rr = dafBase.PropertySet()
1419 validated[name] = rr
1420 for plane
in (
"image",
"mask",
"variance"):
1421 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1422 name +
"->" + plane)
1424 for settings, schema
in ((
"compression", compressionSchema),
1425 (
"scaling", scalingSchema)):
1426 prefix = plane +
"." + settings
1427 if settings
not in recipes[name][plane]:
1429 rr.set(prefix +
"." + key, schema[key])
1431 entry = recipes[name][plane][settings]
1432 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1434 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1435 rr.set(prefix +
"." + key, value)