import copy
import os
import re
import traceback
import weakref

from astro_metadata_translator import fix_header
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.exceptions as pexExcept
from lsst.utils import getPackageDir
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from .utils import createInitialSkyWcs, InitialSkyWcsError
from ._instrument import Instrument
__all__ = ["CameraMapper", "exposureFromImage"]
51 """CameraMapper is a base class for mappers that handle images from a
52 camera and products derived from them. This provides an abstraction layer
53 between the data on disk and the code.
55 Public methods: keys, queryMetadata, getDatasetTypes, map,
56 canStandardize, standardize
58 Mappers for specific data sources (e.g., CFHT Megacam, LSST
59 simulations, etc.) should inherit this class.
61 The CameraMapper manages datasets within a "root" directory. Note that
62 writing to a dataset present in the input root will hide the existing
63 dataset but not overwrite it. See #2160 for design discussion.
65 A camera is assumed to consist of one or more rafts, each composed of
66 multiple CCDs. Each CCD is in turn composed of one or more amplifiers
67 (amps). A camera is also assumed to have a camera geometry description
68 (CameraGeom object) as a policy file, a filter description (Filter class
69 static configuration) as another policy file.
71 Information from the camera geometry and defects are inserted into all
72 Exposure objects returned.
74 The mapper uses one or two registries to retrieve metadata about the
75 images. The first is a registry of all raw exposures. This must contain
76 the time of the observation. One or more tables (or the equivalent)
77 within the registry are used to look up data identifier components that
78 are not specified by the user (e.g. filter) and to return results for
79 metadata queries. The second is an optional registry of all calibration
80 data. This should contain validity start and end entries for each
81 calibration dataset in the same timescale as the observation time.
83 Subclasses will typically set MakeRawVisitInfoClass and optionally the
84 metadata translator class:
86 MakeRawVisitInfoClass: a class variable that points to a subclass of
87 MakeRawVisitInfo, a functor that creates an
88 lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
90 translatorClass: The `~astro_metadata_translator.MetadataTranslator`
91 class to use for fixing metadata values. If it is not set an attempt
92 will be made to infer the class from ``MakeRawVisitInfoClass``, failing
93 that the metadata fixup will try to infer the translator class from the
96 Subclasses must provide the following methods:
98 _extractDetectorName(self, dataId): returns the detector name for a CCD
99 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given
100 a dataset identifier referring to that CCD or a subcomponent of it.
102 _computeCcdExposureId(self, dataId): see below
104 _computeCoaddExposureId(self, dataId, singleFilter): see below
106 Subclasses may also need to override the following methods:
108 _transformId(self, dataId): transformation of a data identifier
109 from colloquial usage (e.g., "ccdname") to proper/actual usage
110 (e.g., "ccd"), including making suitable for path expansion (e.g. removing
111 commas). The default implementation does nothing. Note that this
112 method should not modify its input parameter.
114 getShortCcdName(self, ccdName): a static method that returns a shortened
115 name suitable for use as a filename. The default version converts spaces
118 _mapActualToPath(self, template, actualId): convert a template path to an
119 actual path, using the actual dataset identifier.
121 The mapper's behaviors are largely specified by the policy file.
122 See the MapperDictionary.paf for descriptions of the available items.
124 The 'exposures', 'calibrations', and 'datasets' subpolicies configure
125 mappings (see Mappings class).
127 Common default mappings for all subclasses can be specified in the
128 "policy/{images,exposures,calibrations,datasets}.yaml" files. This
129 provides a simple way to add a product to all camera mappers.
131 Functions to map (provide a path to the data given a dataset
132 identifier dictionary) and standardize (convert data into some standard
133 format or type) may be provided in the subclass as "map_{dataset type}"
134 and "std_{dataset type}", respectively.
136 If non-Exposure datasets cannot be retrieved using standard
137 daf_persistence methods alone, a "bypass_{dataset type}" function may be
138 provided in the subclass to return the dataset instead of using the
139 "datasets" subpolicy.
141 Implementations of map_camera and bypass_camera that should typically be
142 sufficient are provided in this base class.
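
    As an illustrative sketch (all names hypothetical), a minimal concrete
    mapper might look like::

        class MyCamMapper(CameraMapper):
            packageName = "obs_mycam"

            def _extractDetectorName(self, dataId):
                return "ccd%(ccd)02d" % dataId

            def _computeCcdExposureId(self, dataId):
                # Reserve 6 bits for the CCD number.
                return dataId["visit"] * 64 + dataId["ccd"]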

    Notes
    -----
    .. todo::

       Instead of auto-loading the camera at construction time, load it from
       the calibration registry

    Parameters
    ----------
    policy : daf_persistence.Policy
        Policy with per-camera defaults already merged.
    repositoryDir : string
        Policy repository for the subclassing module (obtained with
        getRepositoryPath() on the per-camera default dictionary).
    root : string, optional
        Path to the root directory for data.
    registry : string, optional
        Path to registry with data's metadata.
    calibRoot : string, optional
        Root directory for calibrations.
    calibRegistry : string, optional
        Path to registry with calibrations' metadata.
    provided : list of string, optional
        Keys provided by the mapper.
    parentRegistry : Registry subclass, optional
        Registry from a parent repository that may be used to look up
        data's metadata.
    repositoryCfg : daf_persistence.RepositoryCfg or None, optional
        The configuration information for the repository this mapper is
        being used with.
    """

    packageName = None

    # A class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # A class or subclass of PupilFactory.
    PupilFactoryClass = afwCameraGeom.PupilFactory

    # The metadata translator class to use for fixing headers; if None, it
    # is inferred as described in the class docstring.
    translatorClass = None

    # The gen3 Instrument class equivalent to this gen2 mapper; see
    # getGen3Instrument().
    _gen3instrument = None

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):

        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if repositoryCfg:
            self.root = repositoryCfg.root

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # Use the calibration root if given (directly or via the policy),
        # but do not create the directory if it does not exist.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    f"calibRoot ivar:{calibRoot} or policy['calibRoot']:{policy.get('calibRoot', None)}")

        # Verify that the class variable packageName is set before
        # constructing an instance.
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences,
        e.g., reading the header of an image, retrieving only the size of a
        catalog.

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
        rootStorage : `Storage subclass instance`
            Interface to persisted repository data.
        calibStorage : `Storage subclass instance`
            Interface to persisted calib repository data
        provided : `list` of `str`
            Keys provided by the mapper
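
        Notes
        -----
        As an illustrative sketch (the dataset type name ``calexp`` is just
        an example), a ``calexp`` entry in the ``exposures`` subpolicy
        results in auto-generated methods such as::

            mapper.map_calexp(dataId)            # path for the dataset
            mapper.query_calexp(format, dataId)  # registry lookup
            mapper.map_calexp_md(dataId)         # derived: FITS header only
            mapper.map_calexp_filename(dataId)   # derived: file name
        """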
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDefaults.yaml", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDefaults.yaml", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDefaults.yaml", "policy"))
        dsMappingPolicy = dafPersist.Policy()

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        self.keyDict = dict()
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # No further handling is set up for composite
                        # datasets; there is no registry mapping to query.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)

                    if datasetType in self.mappings:
                        raise ValueError(f"Duplicate mapping policy for dataset type {datasetType}")
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p)
                                for p in location.getLocations()])

                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            # Assumed completion: return the compression
                            # settings for this dataset type.
                            setattr(self, addName,
                                    lambda dataId, datasetType=datasetType:
                                    self.getImageCompressionSettings(datasetType, dataId))

                    if name == "exposures":
                        def getSkyWcs(datasetType, pythonType, location, dataId):
                            fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                            return fitsReader.readWcs()

                        setMethods("wcs", bypassImpl=getSkyWcs)

                        def getRawHeaderWcs(datasetType, pythonType, location, dataId):
                            """Create a SkyWcs from the un-modified raw
                            FITS WCS header keys."""
                            if datasetType[:3] != "raw":
                                raise dafPersist.NoResults("Can only get header WCS for raw exposures.",
                                                           datasetType, dataId)
                            return afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0]))

                        setMethods("header_wcs", bypassImpl=getRawHeaderWcs)

                        def getPhotoCalib(datasetType, pythonType, location, dataId):
                            fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                            return fitsReader.readPhotoCalib()

                        setMethods("photoCalib", bypassImpl=getPhotoCalib)

                        def getVisitInfo(datasetType, pythonType, location, dataId):
                            fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                            return fitsReader.readVisitInfo()

                        setMethods("visitInfo", bypassImpl=getVisitInfo)

                        def getFilter(datasetType, pythonType, location, dataId):
                            fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                            return fitsReader.readFilter()

                        setMethods("filter", bypassImpl=getFilter)

                        def getFilterLabel(datasetType, pythonType, location, dataId):
                            fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
                            return fitsReader.readFilterLabel()

                        setMethods("filterLabel", bypassImpl=getFilterLabel)

                        setMethods("detector",
                                   mapImpl=lambda dataId, write=False:
                                       dafPersist.ButlerLocation(
                                           pythonType="lsst.afw.cameraGeom.CameraConfig",
                                           cppType="Config",
                                           storageName="Internal",
                                           locationList="ignored",
                                           dataId=dataId,
                                           mapper=self,
                                           storage=None,
                                       ),
                                   bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       self.camera[self._extractDetectorName(dataId)]
                                   )

                        def getBBox(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0], hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return afwImage.bboxFromMetadata(md)

                        setMethods("bbox", bypassImpl=getBBox)

                    elif name == "images":
                        def getBBox(datasetType, pythonType, location, dataId):
                            md = readMetadata(location.getLocationsWithRoot()[0])
                            fix_header(md, translator_class=self.translatorClass)
                            return afwImage.bboxFromMetadata(md)

                        setMethods("bbox", bypassImpl=getBBox)

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        def getMetadata(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md

                        setMethods("md", bypassImpl=getMetadata)

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        def getLen(datasetType, pythonType, location, dataId):
                            md = readMetadata(os.path.join(location.getStorage().root,
                                                           location.getLocations()[0]), hdu=1)
                            fix_header(md, translator_class=self.translatorClass)
                            return md["NAXIS2"]

                        setMethods("len", bypassImpl=getLen)

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with visit, ccd.
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        Parameters
        ----------
        dataId : `dict`
            Data identifier with tract and patch.
        singleFilter : `bool`
            True means the desired ID is for a single-filter coadd, in which
            case dataId must contain a filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : `str`
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        `str`
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)
608 """Rename any existing object with the given type and dataId.
610 The CameraMapper implementation saves objects in a sequence of e.g.:
616 All of the backups will be placed in the output repo, however, and will
617 not be removed if they are found elsewhere in the _parent chain. This
618 means that the same file will be stored twice if the previous version
619 was found in an input repo.
628 def firstElement(list):
629 """Get the first element in the list, or None if that can't be
632 return list[0]
if list
is not None and len(list)
else None
635 newLocation = self.map(datasetType, dataId, write=
True)
636 newPath = newLocation.getLocations()[0]
637 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
638 path = firstElement(path)
640 while path
is not None:
642 oldPaths.append((n, path))
643 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
644 path = firstElement(path)
645 for n, oldPath
in reversed(oldPaths):
646 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
649 """Return supported keys.
654 List of keys usable in a dataset identifier
659 """Return a dict of supported keys and their value types for a given
660 dataset type at a given level of the key hierarchy.
665 Dataset type or None for all dataset types.
666 level : `str` or None
667 Level or None for all levels or '' for the default level for the
673 Keys are strings usable in a dataset identifier, values are their
682 if datasetType
is None:
683 keyDict = copy.copy(self.
keyDict)
686 if level
is not None and level
in self.
levels:
687 keyDict = copy.copy(keyDict)
688 for lev
in self.
levels[level]:
703 """Return the name of the camera that this CameraMapper is for."""
705 className = className[className.find(
'.'):-1]
706 m = re.search(
r'(\w+)Mapper', className)
708 m = re.search(
r"class '[\w.]*?(\w+)'", className)
710 return name[:1].lower() + name[1:]
if name
else ''
714 """Return the name of the package containing this CameraMapper."""
716 raise ValueError(
'class variable packageName must not be None')
721 """Return the gen3 Instrument class equivalent for this gen2 Mapper.
726 A `~lsst.obs.base.Instrument` class.
729 raise NotImplementedError(
"Please provide a specific implementation for your instrument"
730 " to enable conversion of this gen2 repository to gen3")
735 raise ValueError(f
"Mapper {cls} has declared a gen3 instrument class of {cls._gen3instrument}"
736 " but that is not an lsst.obs.base.Instrument")
741 """Return the base directory of this package"""
745 """Map a camera dataset."""
747 raise RuntimeError(
"No camera dataset available.")
749 return dafPersist.ButlerLocation(
750 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
752 storageName=
"ConfigStorage",
760 """Return the (preloaded) camera object.
763 raise RuntimeError(
"No camera dataset available.")
767 return dafPersist.ButlerLocation(
768 pythonType=
"lsst.obs.base.ExposureIdInfo",
770 storageName=
"Internal",
771 locationList=
"ignored",
778 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
779 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
780 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
784 """Disable standardization for bfKernel
786 bfKernel is a calibration product that is numpy array,
787 unlike other calibration products that are all images;
788 all calibration images are sent through _standardizeExposure
789 due to CalibrationMapping, but we don't want that to happen to bfKernel
794 """Standardize a raw dataset by converting it to an Exposure instead
797 trimmed=
False, setVisitInfo=
True)
800 """Map a sky policy."""
801 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
802 "Internal",
None,
None, self,
806 """Standardize a sky policy by returning the one we use."""
807 return self.skypolicy

    def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
                       posixIfNoSql=True):
        """Set up a registry (usually SQLite3), trying a number of possible
        paths.

        Parameters
        ----------
        name : `str`
            Name of registry.
        description : `str`
            Description of registry (for log messages)
        path : `str`
            Path for registry.
        policy : `lsst.daf.persistence.Policy`
            Policy that contains the registry name, used if path is None.
        policyKey : `str`
            Key in policy for registry path.
        storage : Storage subclass
            Repository Storage to look in.
        searchParents : bool, optional
            True if the search for a registry should follow any Butler v1
            _parent symlinks.
        posixIfNoSql : bool, optional
            If an sqlite registry is not found, will create a posix registry
            if this is True.

        Returns
        -------
        lsst.daf.persistence.Registry
            Registry object, or None if none could be found or created.
        """
        if path is None and policyKey in policy:
            path = dafPersist.LogicalLocation(policy[policyKey]).locString()
            if os.path.isabs(path):
                raise RuntimeError("Policy should not indicate an absolute path for registry.")
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is None:
                    self.log.warn("Unable to locate registry at policy path (also looked in root): %s",
                                  path)
                path = newPath
            else:
                self.log.warn("Unable to locate registry at policy path: %s", path)
                path = None

        # The old Butler API indicated the registry alongside the repo
        # folder, while the new Butler expects the registry to be inside the
        # repo folder. To support the old API, strip root from path if path
        # starts with root.
        try:
            root = storage.root
            if path and (path.startswith(root)):
                path = path[len(root + '/'):]
        except AttributeError:
            pass

        registry = None

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
        elif not registry and posixIfNoSql:
            self.log.info("Loading Posix %s registry from %s", description, storage.root)
            registry = dafPersist.PosixRegistry(storage.root)

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:

        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)

        The default implementation returns a copy of its input.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier; this must not be modified

        Returns
        -------
        `dict`
            Transformed dataset identifier.
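
        Examples
        --------
        An illustrative (hypothetical) override for a camera whose users say
        ``ccdname`` instead of ``ccd``::

            def _transformId(self, dataId):
                actualId = dict(dataId)
                if "ccdname" in actualId:
                    actualId["ccd"] = actualId.pop("ccdname")
                return actualId
        """
        # Default behavior as documented above: return a copy of the input.
        return dict(dataId)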

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.
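
        For example (illustrative values only), formatting the template
        ``"raw/v%(visit)d/%(ccd)s.fits"`` with the transformed identifier
        ``{"visit": 1, "ccd": "R12_S01"}`` yields ``"raw/v1/R12_S01.fits"``.
        """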
        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
972 """Convert a CCD name to a form useful as a filename
974 The default implementation converts spaces to underscores.
976 return ccdName.replace(
" ",
"_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        Parameters
        ----------
        dataId : `dict`
            Dataset identifier.

        Returns
        -------
        `str`
            Detector name.
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        return self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        Parameters
        ----------
        item : `lsst.afw.image.Exposure`
            Exposure to set the detector in.
        dataId : `dict`
            Dataset identifier.
        trimmed : `bool`
            Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the filter from.
        item : `lsst.afw.image.Exposure`
            Exposure to set the filter in.
        dataId : `dict`
            Dataset identifier.
        """

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI)
                or isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        try:
            item.setFilter(afwImage.Filter(filterName))
        except pexExcept.NotFoundError:
            self.log.warn("Filter %s not defined. Set to UNKNOWN." % (filterName))

    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        Parameters
        ----------
        mapping : `lsst.obs.base.Mapping`
            Where to get the values from.
        item : image-like object
            Can be any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        dataId : `dict`
            Dataset identifier.
        filter : `bool`
            Set filter? Ignored if item is already an exposure
        trimmed : `bool`
            Should detector be marked as trimmed?
        setVisitInfo : `bool`
            Should Exposure have its VisitInfo filled out from the metadata?

        Returns
        -------
        `lsst.afw.image.Exposure`
            The standardized Exposure.
        """
        try:
            exposure = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                         setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(exposure, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(exposure, dataId, trimmed)

        # We can only create a WCS if the exposure does not already have one
        # and we have either a VisitInfo or exposure metadata.
        if mapping.level.lower() != "amp" and exposure.getWcs() is None and \
                (exposure.getInfo().getVisitInfo() is not None or exposure.getMetadata().toDict()):
            self._createInitialSkyWcs(exposure)

        if filter:
            self._setFilter(mapping, exposure, dataId)

        return exposure

    def _createSkyWcsFromMetadata(self, exposure):
        """Create a SkyWcs from the FITS header metadata in an Exposure.

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get metadata from, and attach the SkyWcs to.
        """
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=self.translatorClass)
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # Missing WCS information in the metadata is not unusual (e.g.
            # for calibration files), so only log at debug level.
            self.log.debug("wcs set to None; missing information found in metadata to create a valid wcs:"
                           " %s", e)
        # Ensure any WCS values stripped from the metadata are removed in the
        # exposure.
        exposure.setMetadata(metadata)

    def _createInitialSkyWcs(self, exposure):
        """Create a SkyWcs from the boresight and camera geometry.

        If the boresight or camera geometry do not support this method of
        WCS creation, this falls back on the header metadata-based version
        (typically a purely linear FITS crval/crpix/cdmatrix WCS).

        Parameters
        ----------
        exposure : `lsst.afw.image.Exposure`
            The exposure to get data from, and attach the SkyWcs to.
        """
        # Create the metadata-based WCS first (this also strips the WCS keys
        # from the exposure metadata); it may be replaced below.
        self._createSkyWcsFromMetadata(exposure)

        if exposure.getInfo().getVisitInfo() is None:
            msg = "No VisitInfo; cannot access boresight information. Defaulting to metadata-based SkyWcs."
            self.log.warn(msg)
            return
        try:
            newSkyWcs = createInitialSkyWcs(exposure.getInfo().getVisitInfo(), exposure.getDetector())
            exposure.setWcs(newSkyWcs)
        except InitialSkyWcsError as e:
            msg = "Cannot create SkyWcs using VisitInfo and Detector, using metadata-based SkyWcs: %s"
            self.log.warn(msg, e)
            self.log.debug("Exception was: %s", traceback.TracebackException.from_exception(e))
            if e.__context__ is not None:
                self.log.debug("Root-cause Exception was: %s",
                               traceback.TracebackException.from_exception(e.__context__))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:

        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        Parameters
        ----------
        policy : `lsst.daf.persistence.Policy`
            Policy with per-camera defaults already merged
            (PexPolicy only for backward compatibility).
        repositoryDir : `str`
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        """
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )
1208 """Get the registry used by this mapper.
1213 The registry used by this mapper for this mapper's repository.
1218 """Stuff image compression settings into a daf.base.PropertySet
1220 This goes into the ButlerLocation's "additionalData", which gets
1221 passed into the boost::persistence framework.
1226 Type of dataset for which to get the image compression settings.
1232 additionalData : `lsst.daf.base.PropertySet`
1233 Image compression settings.
1235 mapping = self.
mappings[datasetType]
1236 recipeName = mapping.recipe
1237 storageType = mapping.storage
1239 return dafBase.PropertySet()
1241 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1242 (datasetType, storageType, recipeName))
1243 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1244 seed = hash(tuple(dataId.items())) % 2**31
1245 for plane
in (
"image",
"mask",
"variance"):
1246 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1247 recipe.set(plane +
".scaling.seed", seed)

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which are used to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
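
        A recipe that also scales floating-point pixels might look like this
        (illustrative only; the entries follow the schema listed above):

            FitsStorage:
              lossyBasic:
                image:
                  compression:
                    algorithm: RICE
                  scaling:
                    algorithm: STDEV_POSITIVE
                    bitpix: 32
                mask:
                  compression:
                    algorithm: GZIP_SHUFFLE
                variance:
                  compression:
                    algorithm: RICE
        """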
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow overrides, only supplements.
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1328 """Generate an Exposure from an image-like object
1330 If the image is a DecoratedImage then also set its WCS and metadata
1331 (Image and MaskedImage are missing the necessary metadata
1332 and Exposure already has those set)
1336 image : Image-like object
1337 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
1342 `lsst.afw.image.Exposure`
1343 Exposure containing input image.
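
    Examples
    --------
    A minimal illustrative sketch; the blank 64x64 MaskedImage stands in for
    real pixel data::

        import lsst.afw.image as afwImage

        maskedImage = afwImage.MaskedImageF(64, 64)
        exposure = exposureFromImage(maskedImage)  # an ExposureF
    """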
    translatorClass = None
    if mapper is not None:
        translatorClass = mapper.translatorClass

    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        exposure = image
        metadata = exposure.getMetadata()
        fix_header(metadata, translator_class=translatorClass)
    else:  # a plain Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Set VisitInfo if we can.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
1381 """Validate recipes for FitsStorage
1383 The recipes are supplemented with default values where appropriate.
1385 TODO: replace this custom validation code with Cerberus (DM-11846)
1389 recipes : `lsst.daf.persistence.Policy`
1390 FitsStorage recipes to validate.
1394 validated : `lsst.daf.base.PropertySet`
1395 Validated FitsStorage recipe.
1400 If validation fails.
1404 compressionSchema = {
1405 "algorithm":
"NONE",
1408 "quantizeLevel": 0.0,
1411 "algorithm":
"NONE",
1413 "maskPlanes": [
"NO_DATA"],
1415 "quantizeLevel": 4.0,
1422 def checkUnrecognized(entry, allowed, description):
1423 """Check to see if the entry contains unrecognised keywords"""
1424 unrecognized = set(entry.keys()) - set(allowed)
1427 "Unrecognized entries when parsing image compression recipe %s: %s" %
1428 (description, unrecognized))
1431 for name
in recipes.names(
True):
1432 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1433 rr = dafBase.PropertySet()
1434 validated[name] = rr
1435 for plane
in (
"image",
"mask",
"variance"):
1436 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1437 name +
"->" + plane)
1439 for settings, schema
in ((
"compression", compressionSchema),
1440 (
"scaling", scalingSchema)):
1441 prefix = plane +
"." + settings
1442 if settings
not in recipes[name][plane]:
1444 rr.set(prefix +
"." + key, schema[key])
1446 entry = recipes[name][plane][settings]
1447 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1449 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1450 rr.set(prefix +
"." + key, value)