25 from astropy.io
import fits
28 import lsst.daf.persistence
as dafPersist
29 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base
as dafBase
31 import lsst.afw.geom
as afwGeom
32 import lsst.afw.image
as afwImage
33 import lsst.afw.table
as afwTable
34 from lsst.afw.fits
import readMetadata
35 import lsst.afw.cameraGeom
as afwCameraGeom
36 import lsst.log
as lsstLog
38 from .exposureIdInfo
import ExposureIdInfo
39 from .makeRawVisitInfo
import MakeRawVisitInfo
# Public API of this module: the mapper base class and the helper that
# converts raw image-like objects into Exposures.
__all__ = ["CameraMapper", "exposureFromImage"]
47 """CameraMapper is a base class for mappers that handle images from a 48 camera and products derived from them. This provides an abstraction layer 49 between the data on disk and the code. 51 Public methods: keys, queryMetadata, getDatasetTypes, map, 52 canStandardize, standardize 54 Mappers for specific data sources (e.g., CFHT Megacam, LSST 55 simulations, etc.) should inherit this class. 57 The CameraMapper manages datasets within a "root" directory. Note that 58 writing to a dataset present in the input root will hide the existing 59 dataset but not overwrite it. See #2160 for design discussion. 61 A camera is assumed to consist of one or more rafts, each composed of 62 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 63 (amps). A camera is also assumed to have a camera geometry description 64 (CameraGeom object) as a policy file, a filter description (Filter class 65 static configuration) as another policy file, and an optional defects 66 description directory. 68 Information from the camera geometry and defects are inserted into all 69 Exposure objects returned. 71 The mapper uses one or two registries to retrieve metadata about the 72 images. The first is a registry of all raw exposures. This must contain 73 the time of the observation. One or more tables (or the equivalent) 74 within the registry are used to look up data identifier components that 75 are not specified by the user (e.g. filter) and to return results for 76 metadata queries. The second is an optional registry of all calibration 77 data. This should contain validity start and end entries for each 78 calibration dataset in the same timescale as the observation time. 80 Subclasses will typically set MakeRawVisitInfoClass: 82 MakeRawVisitInfoClass: a class variable that points to a subclass of 83 MakeRawVisitInfo, a functor that creates an 84 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
86 Subclasses must provide the following methods: 88 _extractDetectorName(self, dataId): returns the detector name for a CCD 89 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 90 a dataset identifier referring to that CCD or a subcomponent of it. 92 _computeCcdExposureId(self, dataId): see below 94 _computeCoaddExposureId(self, dataId, singleFilter): see below 96 Subclasses may also need to override the following methods: 98 _transformId(self, dataId): transformation of a data identifier 99 from colloquial usage (e.g., "ccdname") to proper/actual usage 100 (e.g., "ccd"), including making suitable for path expansion (e.g. removing 101 commas). The default implementation does nothing. Note that this 102 method should not modify its input parameter. 104 getShortCcdName(self, ccdName): a static method that returns a shortened 105 name suitable for use as a filename. The default version converts spaces 108 _getCcdKeyVal(self, dataId): return a CCD key and value 109 by which to look up defects in the defects registry. 110 The default value returns ("ccd", detector name) 112 _mapActualToPath(self, template, actualId): convert a template path to an 113 actual path, using the actual dataset identifier. 115 The mapper's behaviors are largely specified by the policy file. 116 See the MapperDictionary.paf for descriptions of the available items. 118 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 119 mappings (see Mappings class). 121 Common default mappings for all subclasses can be specified in the 122 "policy/{images,exposures,calibrations,datasets}.yaml" files. This 123 provides a simple way to add a product to all camera mappers. 125 Functions to map (provide a path to the data given a dataset 126 identifier dictionary) and standardize (convert data into some standard 127 format or type) may be provided in the subclass as "map_{dataset type}" 128 and "std_{dataset type}", respectively. 
130 If non-Exposure datasets cannot be retrieved using standard 131 daf_persistence methods alone, a "bypass_{dataset type}" function may be 132 provided in the subclass to return the dataset instead of using the 133 "datasets" subpolicy. 135 Implementations of map_camera and bypass_camera that should typically be 136 sufficient are provided in this base class. 142 - Handle defects the same was as all other calibration products, using the 144 - Instead of auto-loading the camera at construction time, load it from 145 the calibration registry 146 - Rewrite defects as AFW tables so we don't need astropy.io.fits to 147 unpersist them; then remove all mention of astropy.io.fits from this 154 MakeRawVisitInfoClass = MakeRawVisitInfo
157 PupilFactoryClass = afwCameraGeom.PupilFactory
159 def __init__(self, policy, repositoryDir,
160 root=None, registry=None, calibRoot=None, calibRegistry=None,
161 provided=None, parentRegistry=None, repositoryCfg=None):
162 """Initialize the CameraMapper. 166 policy : daf_persistence.Policy, 167 Policy with per-camera defaults already merged. 168 repositoryDir : string 169 Policy repository for the subclassing module (obtained with 170 getRepositoryPath() on the per-camera default dictionary). 171 root : string, optional 172 Path to the root directory for data. 173 registry : string, optional 174 Path to registry with data's metadata. 175 calibRoot : string, optional 176 Root directory for calibrations. 177 calibRegistry : string, optional 178 Path to registry with calibrations' metadata. 179 provided : list of string, optional 180 Keys provided by the mapper. 181 parentRegistry : Registry subclass, optional 182 Registry from a parent repository that may be used to look up 184 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 185 The configuration information for the repository this mapper is 189 dafPersist.Mapper.__init__(self)
191 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
196 self.
root = repositoryCfg.root
200 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 201 if repoPolicy
is not None:
202 policy.update(repoPolicy)
206 if 'levels' in policy:
207 levelsPolicy = policy[
'levels']
208 for key
in levelsPolicy.names(
True):
209 self.
levels[key] = set(levelsPolicy.asArray(key))
212 if 'defaultSubLevels' in policy:
218 root = dafPersist.LogicalLocation(root).locString()
228 if calibRoot
is not None:
229 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
230 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
233 calibRoot = policy.get(
'calibRoot',
None)
235 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
237 if calibStorage
is None:
245 posixIfNoSql=(
not parentRegistry))
248 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
249 if needCalibRegistry:
252 "calibRegistryPath", calibStorage,
256 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
257 "calibRoot ivar:%s or policy['calibRoot']:%s" %
258 (calibRoot, policy.get(
'calibRoot',
None)))
275 if 'defects' in policy:
276 self.
defectPath = os.path.join(repositoryDir, policy[
'defects'])
277 defectRegistryLocation = os.path.join(self.
defectPath,
"defectRegistry.sqlite3")
278 self.
defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
286 raise ValueError(
'class variable packageName must not be None')
290 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
291 """Initialize mappings 293 For each of the dataset types that we want to be able to read, there 294 are methods that can be created to support them: 295 * map_<dataset> : determine the path for dataset 296 * std_<dataset> : standardize the retrieved dataset 297 * bypass_<dataset> : retrieve the dataset (bypassing the usual 299 * query_<dataset> : query the registry 301 Besides the dataset types explicitly listed in the policy, we create 302 additional, derived datasets for additional conveniences, 303 e.g., reading the header of an image, retrieving only the size of a 308 policy : `lsst.daf.persistence.Policy` 309 Policy with per-camera defaults already merged 310 rootStorage : `Storage subclass instance` 311 Interface to persisted repository data. 312 calibRoot : `Storage subclass instance` 313 Interface to persisted calib repository data 314 provided : `list` of `str` 315 Keys provided by the mapper 318 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
319 "obs_base",
"ImageMappingDefaults.yaml",
"policy"))
320 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
321 "obs_base",
"ExposureMappingDefaults.yaml",
"policy"))
322 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
323 "obs_base",
"CalibrationMappingDefaults.yaml",
"policy"))
324 dsMappingPolicy = dafPersist.Policy()
328 (
"images", imgMappingPolicy, ImageMapping),
329 (
"exposures", expMappingPolicy, ExposureMapping),
330 (
"calibrations", calMappingPolicy, CalibrationMapping),
331 (
"datasets", dsMappingPolicy, DatasetMapping)
334 for name, defPolicy, cls
in mappingList:
336 datasets = policy[name]
339 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
340 if os.path.exists(defaultsPath):
341 datasets.merge(dafPersist.Policy(defaultsPath))
344 setattr(self, name, mappings)
345 for datasetType
in datasets.names(
True):
346 subPolicy = datasets[datasetType]
347 subPolicy.merge(defPolicy)
349 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
350 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
351 subPolicy=subPolicy):
352 components = subPolicy.get(
'composite')
353 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 354 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 355 python = subPolicy[
'python']
356 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
357 disassembler=disassembler,
361 for name, component
in components.items():
362 butlerComposite.add(id=name,
363 datasetType=component.get(
'datasetType'),
364 setter=component.get(
'setter',
None),
365 getter=component.get(
'getter',
None),
366 subset=component.get(
'subset',
False),
367 inputOnly=component.get(
'inputOnly',
False))
368 return butlerComposite
369 setattr(self,
"map_" + datasetType, compositeClosure)
373 if name ==
"calibrations":
375 provided=provided, dataRoot=rootStorage)
377 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
378 self.
keyDict.update(mapping.keys())
379 mappings[datasetType] = mapping
380 self.
mappings[datasetType] = mapping
381 if not hasattr(self,
"map_" + datasetType):
382 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
383 return mapping.map(mapper, dataId, write)
384 setattr(self,
"map_" + datasetType, mapClosure)
385 if not hasattr(self,
"query_" + datasetType):
386 def queryClosure(format, dataId, mapping=mapping):
387 return mapping.lookup(format, dataId)
388 setattr(self,
"query_" + datasetType, queryClosure)
389 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
390 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
391 return mapping.standardize(mapper, item, dataId)
392 setattr(self,
"std_" + datasetType, stdClosure)
394 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
395 """Set convenience methods on CameraMapper""" 396 mapName =
"map_" + datasetType +
"_" + suffix
397 bypassName =
"bypass_" + datasetType +
"_" + suffix
398 queryName =
"query_" + datasetType +
"_" + suffix
399 if not hasattr(self, mapName):
400 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
401 if not hasattr(self, bypassName):
402 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
403 bypassImpl = getattr(self,
"bypass_" + datasetType)
404 if bypassImpl
is not None:
405 setattr(self, bypassName, bypassImpl)
406 if not hasattr(self, queryName):
407 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
410 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
411 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
413 if subPolicy[
"storage"] ==
"FitsStorage":
414 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
415 readMetadata(location.getLocationsWithRoot()[0]))
418 addName =
"add_" + datasetType
419 if not hasattr(self, addName):
422 if name ==
"exposures":
423 def getSkyWcs(datasetType, pythonType, location, dataId):
424 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
425 return fitsReader.readWcs()
427 setMethods(
"wcs", bypassImpl=getSkyWcs)
429 def getPhotoCalib(datasetType, pythonType, location, dataId):
430 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
431 return fitsReader.readPhotoCalib()
433 setMethods(
"photoCalib", bypassImpl=getPhotoCalib)
435 def getVisitInfo(datasetType, pythonType, location, dataId):
436 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
437 return fitsReader.readVisitInfo()
439 setMethods(
"visitInfo", bypassImpl=getVisitInfo)
441 def getFilter(datasetType, pythonType, location, dataId):
442 fitsReader = afwImage.ExposureFitsReader(location.getLocationsWithRoot()[0])
443 return fitsReader.readFilter()
445 setMethods(
"filter", bypassImpl=getFilter)
447 setMethods(
"detector",
448 mapImpl=
lambda dataId, write=
False:
449 dafPersist.ButlerLocation(
450 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
452 storageName=
"Internal",
453 locationList=
"ignored",
458 bypassImpl=
lambda datasetType, pythonType, location, dataId:
461 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
462 afwImage.bboxFromMetadata(
463 readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
465 elif name ==
"images":
466 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
467 afwImage.bboxFromMetadata(
468 readMetadata(location.getLocationsWithRoot()[0])))
470 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
471 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
472 readMetadata(os.path.join(location.getStorage().root,
473 location.getLocations()[0]), hdu=1))
476 if subPolicy[
"storage"] ==
"FitsStorage":
477 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
478 subId = dataId.copy()
480 loc = mapping.map(mapper, subId, write)
481 bbox = dataId[
'bbox']
482 llcX = bbox.getMinX()
483 llcY = bbox.getMinY()
484 width = bbox.getWidth()
485 height = bbox.getHeight()
486 loc.additionalData.set(
'llcX', llcX)
487 loc.additionalData.set(
'llcY', llcY)
488 loc.additionalData.set(
'width', width)
489 loc.additionalData.set(
'height', height)
490 if 'imageOrigin' in dataId:
491 loc.additionalData.set(
'imageOrigin',
492 dataId[
'imageOrigin'])
495 def querySubClosure(key, format, dataId, mapping=mapping):
496 subId = dataId.copy()
498 return mapping.lookup(format, subId)
499 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
501 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
503 setMethods(
"len", bypassImpl=
lambda datasetType, pythonType, location, dataId:
504 readMetadata(os.path.join(location.getStorage().root,
505 location.getLocations()[0]),
506 hdu=1).getScalar(
"NAXIS2"))
509 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
510 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
511 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
512 location.getLocations()[0])))
514 def _computeCcdExposureId(self, dataId):
515 """Compute the 64-bit (long) identifier for a CCD exposure. 517 Subclasses must override 522 Data identifier with visit, ccd. 524 raise NotImplementedError()
526 def _computeCoaddExposureId(self, dataId, singleFilter):
527 """Compute the 64-bit (long) identifier for a coadd. 529 Subclasses must override 534 Data identifier with tract and patch. 535 singleFilter : `bool` 536 True means the desired ID is for a single-filter coadd, in which 537 case dataIdmust contain filter. 539 raise NotImplementedError()
541 def _search(self, path):
542 """Search for path in the associated repository's storage. 547 Path that describes an object in the repository associated with 549 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 550 indicator will be stripped when searching and so will match 551 filenames without the HDU indicator, e.g. 'foo.fits'. The path 552 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 557 The path for this object in the repository. Will return None if the 558 object can't be found. If the input argument path contained an HDU 559 indicator, the returned path will also contain the HDU indicator. 564 """Rename any existing object with the given type and dataId. 566 The CameraMapper implementation saves objects in a sequence of e.g.: 572 All of the backups will be placed in the output repo, however, and will 573 not be removed if they are found elsewhere in the _parent chain. This 574 means that the same file will be stored twice if the previous version 575 was found in an input repo. 584 def firstElement(list):
585 """Get the first element in the list, or None if that can't be 588 return list[0]
if list
is not None and len(list)
else None 591 newLocation = self.map(datasetType, dataId, write=
True)
592 newPath = newLocation.getLocations()[0]
593 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
594 path = firstElement(path)
596 while path
is not None:
598 oldPaths.append((n, path))
599 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
600 path = firstElement(path)
601 for n, oldPath
in reversed(oldPaths):
602 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
605 """Return supported keys. 610 List of keys usable in a dataset identifier 615 """Return a dict of supported keys and their value types for a given 616 dataset type at a given level of the key hierarchy. 621 Dataset type or None for all dataset types. 622 level : `str` or None 623 Level or None for all levels or '' for the default level for the 629 Keys are strings usable in a dataset identifier, values are their 637 if datasetType
is None:
638 keyDict = copy.copy(self.
keyDict)
641 if level
is not None and level
in self.
levels:
642 keyDict = copy.copy(keyDict)
643 for l
in self.
levels[level]:
658 """Return the name of the camera that this CameraMapper is for.""" 660 className = className[className.find(
'.'):-1]
661 m = re.search(
r'(\w+)Mapper', className)
663 m = re.search(
r"class '[\w.]*?(\w+)'", className)
665 return name[:1].lower() + name[1:]
if name
else '' 669 """Return the name of the package containing this CameraMapper.""" 671 raise ValueError(
'class variable packageName must not be None')
676 """Return the base directory of this package""" 680 """Map a camera dataset.""" 682 raise RuntimeError(
"No camera dataset available.")
684 return dafPersist.ButlerLocation(
685 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
687 storageName=
"ConfigStorage",
695 """Return the (preloaded) camera object. 698 raise RuntimeError(
"No camera dataset available.")
702 """Map defects dataset. 706 `lsst.daf.butler.ButlerLocation` 707 Minimal ButlerLocation containing just the locationList field 708 (just enough information that bypass_defects can use it). 711 if defectFitsPath
is None:
712 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
714 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
719 """Return a defect based on the butler location returned by map_defects 723 butlerLocation : `lsst.daf.persistence.ButlerLocation` 724 locationList = path to defects FITS file 726 Butler data ID; "ccd" must be set. 728 Note: the name "bypass_XXX" means the butler makes no attempt to 729 convert the ButlerLocation into an object, which is what we want for 730 now, since that conversion is a bit tricky. 733 defectsFitsPath = butlerLocation.locationList[0]
735 with fits.open(defectsFitsPath)
as hduList:
736 for hdu
in hduList[1:]:
737 if hdu.header[
"name"] != detectorName:
741 for data
in hdu.data:
742 bbox = afwGeom.Box2I(
743 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
744 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
746 defectList.append(afwImage.DefectBase(bbox))
749 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
752 return dafPersist.ButlerLocation(
753 pythonType=
"lsst.obs.base.ExposureIdInfo",
755 storageName=
"Internal",
756 locationList=
"ignored",
763 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 764 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
765 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
769 """Disable standardization for bfKernel 771 bfKernel is a calibration product that is numpy array, 772 unlike other calibration products that are all images; 773 all calibration images are sent through _standardizeExposure 774 due to CalibrationMapping, but we don't want that to happen to bfKernel 779 """Standardize a raw dataset by converting it to an Exposure instead 782 trimmed=
False, setVisitInfo=
True)
785 """Map a sky policy.""" 786 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
787 "Internal",
None,
None, self,
791 """Standardize a sky policy by returning the one we use.""" 792 return self.skypolicy
800 def _getCcdKeyVal(self, dataId):
801 """Return CCD key and value used to look a defect in the defect 804 The default implementation simply returns ("ccd", full detector name) 808 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
810 """Set up a registry (usually SQLite3), trying a number of possible 818 Description of registry (for log messages) 822 Policy that contains the registry name, used if path is None. 824 Key in policy for registry path. 825 storage : Storage subclass 826 Repository Storage to look in. 827 searchParents : bool, optional 828 True if the search for a registry should follow any Butler v1 830 posixIfNoSql : bool, optional 831 If an sqlite registry is not found, will create a posix registry if 836 lsst.daf.persistence.Registry 839 if path
is None and policyKey
in policy:
840 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
841 if os.path.isabs(path):
842 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
843 if not storage.exists(path):
844 newPath = storage.instanceSearch(path)
846 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 848 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
852 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
860 if path
and (path.startswith(root)):
861 path = path[len(root +
'/'):]
862 except AttributeError:
868 def search(filename, description):
869 """Search for file in storage 874 Filename to search for 876 Description of file, for error message. 880 path : `str` or `None` 881 Path to file, or None 883 result = storage.instanceSearch(filename)
886 self.
log.debug(
"Unable to locate %s: %s", description, filename)
891 path = search(
"%s.pgsql" % name,
"%s in root" % description)
893 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
895 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
898 if not storage.exists(path):
899 newPath = storage.instanceSearch(path)
900 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 901 if newPath
is not None:
903 localFileObj = storage.getLocalFile(path)
904 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
905 registry = dafPersist.Registry.create(localFileObj.name)
907 elif not registry
and posixIfNoSql:
909 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
910 registry = dafPersist.PosixRegistry(storage.root)
916 def _transformId(self, dataId):
917 """Generate a standard ID dict from a camera-specific ID dict. 919 Canonical keys include: 920 - amp: amplifier name 921 - ccd: CCD name (in LSST this is a combination of raft and sensor) 922 The default implementation returns a copy of its input. 927 Dataset identifier; this must not be modified 932 Transformed dataset identifier. 937 def _mapActualToPath(self, template, actualId):
938 """Convert a template path to an actual path, using the actual data 939 identifier. This implementation is usually sufficient but can be 940 overridden by the subclass. 957 return template % transformedId
958 except Exception
as e:
959 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
963 """Convert a CCD name to a form useful as a filename 965 The default implementation converts spaces to underscores. 967 return ccdName.replace(
" ",
"_")
969 def _extractDetectorName(self, dataId):
970 """Extract the detector (CCD) name from the dataset identifier. 972 The name in question is the detector name used by lsst.afw.cameraGeom. 984 raise NotImplementedError(
"No _extractDetectorName() function specified")
986 def _extractAmpId(self, dataId):
987 """Extract the amplifier identifer from a dataset identifier. 989 .. note:: Deprecated in 11_0 991 amplifier identifier has two parts: the detector name for the CCD 992 containing the amplifier and index of the amplifier in the detector. 1002 Amplifier identifier 1006 return (trDataId[
"ccd"], int(trDataId[
'amp']))
1008 def _setAmpDetector(self, item, dataId, trimmed=True):
1009 """Set the detector object in an Exposure for an amplifier. 1011 Defects are also added to the Exposure based on the detector object. 1015 item : `lsst.afw.image.Exposure` 1016 Exposure to set the detector in. 1020 Should detector be marked as trimmed? (ignored) 1023 return self.
_setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1025 def _setCcdDetector(self, item, dataId, trimmed=True):
1026 """Set the detector object in an Exposure for a CCD. 1030 item : `lsst.afw.image.Exposure` 1031 Exposure to set the detector in. 1035 Should detector be marked as trimmed? (ignored) 1037 if item.getDetector()
is not None:
1041 detector = self.
camera[detectorName]
1042 item.setDetector(detector)
1044 def _setFilter(self, mapping, item, dataId):
1045 """Set the filter object in an Exposure. If the Exposure had a FILTER 1046 keyword, this was already processed during load. But if it didn't, 1047 use the filter from the registry. 1051 mapping : `lsst.obs.base.Mapping` 1052 Where to get the filter from. 1053 item : `lsst.afw.image.Exposure` 1054 Exposure to set the filter in. 1059 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1060 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1063 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1066 actualId = mapping.need([
'filter'], dataId)
1067 filterName = actualId[
'filter']
1069 filterName = self.
filters[filterName]
1070 item.setFilter(afwImage.Filter(filterName))
1073 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1074 trimmed=True, setVisitInfo=True):
1075 """Default standardization function for images. 1077 This sets the Detector from the camera geometry 1078 and optionally set the Fiter. In both cases this saves 1079 having to persist some data in each exposure (or image). 1083 mapping : `lsst.obs.base.Mapping` 1084 Where to get the values from. 1085 item : image-like object 1086 Can be any of lsst.afw.image.Exposure, 1087 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1088 or lsst.afw.image.MaskedImage 1093 Set filter? Ignored if item is already an exposure 1095 Should detector be marked as trimmed? 1096 setVisitInfo : `bool` 1097 Should Exposure have its VisitInfo filled out from the metadata? 1101 `lsst.afw.image.Exposure` 1102 The standardized Exposure. 1105 item =
exposureFromImage(item, dataId, mapper=self, logger=self.
log, setVisitInfo=setVisitInfo)
1106 except Exception
as e:
1107 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1110 if mapping.level.lower() ==
"amp":
1112 elif mapping.level.lower() ==
"ccd":
1120 def _defectLookup(self, dataId, dateKey='taiObs'):
1121 """Find the defects for a given CCD. 1131 Path to the defects file or None if not available. 1136 raise RuntimeError(
"No registry for defect lookup")
1140 dataIdForLookup = {
'visit': dataId[
'visit']}
1142 rows = self.
registry.lookup((dateKey), (
'raw_visit'), dataIdForLookup)
1145 assert len(rows) == 1
1151 (
"DATETIME(?)",
"DATETIME(validStart)",
"DATETIME(validEnd)"),
1153 if not rows
or len(rows) == 0:
1156 return os.path.join(self.
defectPath, rows[0][0])
1158 raise RuntimeError(
"Querying for defects (%s, %s) returns %d files: %s" %
1159 (ccdVal, dayObs, len(rows),
", ".join([_[0]
for _
in rows])))
1161 def _makeCamera(self, policy, repositoryDir):
1162 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 1165 Also set self.cameraDataLocation, if relevant (else it can be left 1168 This implementation assumes that policy contains an entry "camera" 1169 that points to the subdirectory in this package of camera data; 1170 specifically, that subdirectory must contain: 1171 - a file named `camera.py` that contains persisted camera config 1172 - ampInfo table FITS files, as required by 1173 lsst.afw.cameraGeom.makeCameraFromPath 1177 policy : `lsst.daf.persistence.Policy` 1178 Policy with per-camera defaults already merged 1179 (PexPolicy only for backward compatibility). 1180 repositoryDir : `str` 1181 Policy repository for the subclassing module (obtained with 1182 getRepositoryPath() on the per-camera default dictionary). 1184 if 'camera' not in policy:
1185 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1186 cameraDataSubdir = policy[
'camera']
1188 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1189 cameraConfig = afwCameraGeom.CameraConfig()
1192 return afwCameraGeom.makeCameraFromPath(
1193 cameraConfig=cameraConfig,
1194 ampInfoPath=ampInfoPath,
1200 """Get the registry used by this mapper. 1205 The registry used by this mapper for this mapper's repository. 1210 """Stuff image compression settings into a daf.base.PropertySet 1212 This goes into the ButlerLocation's "additionalData", which gets 1213 passed into the boost::persistence framework. 1218 Type of dataset for which to get the image compression settings. 1224 additionalData : `lsst.daf.base.PropertySet` 1225 Image compression settings. 1227 mapping = self.
mappings[datasetType]
1228 recipeName = mapping.recipe
1229 storageType = mapping.storage
1231 return dafBase.PropertySet()
1233 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1234 (datasetType, storageType, recipeName))
1235 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1236 seed = hash(tuple(dataId.items())) % 2**31
1237 for plane
in (
"image",
"mask",
"variance"):
1238 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1239 recipe.set(plane +
".scaling.seed", seed)
1242 def _initWriteRecipes(self):
1243 """Read the recipes for writing files 1245 These recipes are currently used for configuring FITS compression, 1246 but they could have wider uses for configuring different flavors 1247 of the storage types. A recipe is referred to by a symbolic name, 1248 which has associated settings. These settings are stored as a 1249 `PropertySet` so they can easily be passed down to the 1250 boost::persistence framework as the "additionalData" parameter. 1252 The list of recipes is written in YAML. A default recipe and 1253 some other convenient recipes are in obs_base/policy/writeRecipes.yaml 1254 and these may be overridden or supplemented by the individual obs_* 1255 packages' own policy/writeRecipes.yaml files. 1257 Recipes are grouped by the storage type. Currently, only the 1258 ``FitsStorage`` storage type uses recipes, which uses it to 1259 configure FITS image compression. 1261 Each ``FitsStorage`` recipe for FITS compression should define 1262 "image", "mask" and "variance" entries, each of which may contain 1263 "compression" and "scaling" entries. Defaults will be provided for 1264 any missing elements under "compression" and "scaling". 1266 The allowed entries under "compression" are: 1268 * algorithm (string): compression algorithm to use 1269 * rows (int): number of rows per tile (0 = entire dimension) 1270 * columns (int): number of columns per tile (0 = entire dimension) 1271 * quantizeLevel (float): cfitsio quantization level 1273 The allowed entries under "scaling" are: 1275 * algorithm (string): scaling algorithm to use 1276 * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64) 1277 * fuzz (bool): fuzz the values when quantising floating-point values? 
1278 * seed (long): seed for random number generator when fuzzing 1279 * maskPlanes (list of string): mask planes to ignore when doing 1281 * quantizeLevel: divisor of the standard deviation for STDEV_* scaling 1282 * quantizePad: number of stdev to allow on the low side (for 1283 STDEV_POSITIVE/NEGATIVE) 1284 * bscale: manually specified BSCALE (for MANUAL scaling) 1285 * bzero: manually specified BSCALE (for MANUAL scaling) 1287 A very simple example YAML recipe: 1293 algorithm: GZIP_SHUFFLE 1297 recipesFile = os.path.join(
getPackageDir(
"obs_base"),
"policy",
"writeRecipes.yaml")
1298 recipes = dafPersist.Policy(recipesFile)
1299 supplementsFile = os.path.join(self.
getPackageDir(),
"policy",
"writeRecipes.yaml")
1300 validationMenu = {
'FitsStorage': validateRecipeFitsStorage, }
1301 if os.path.exists(supplementsFile)
and supplementsFile != recipesFile:
1302 supplements = dafPersist.Policy(supplementsFile)
1304 for entry
in validationMenu:
1305 intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1307 raise RuntimeError(
"Recipes provided in %s section %s may not override those in %s: %s" %
1308 (supplementsFile, entry, recipesFile, intersection))
1309 recipes.update(supplements)
1312 for storageType
in recipes.names(
True):
1313 if "default" not in recipes[storageType]:
1314 raise RuntimeError(
"No 'default' recipe defined for storage type %s in %s" %
1315 (storageType, recipesFile))
1316 self.
_writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Data identifier, used to compute the exposure ID when a mapper is
        supplied.
    mapper : `CameraMapper`, optional
        Mapper used to build a VisitInfo from the image metadata.
    logger : `lsst.log.Log`, optional
        Logger for diagnostics; a default "CameraMapper" logger is created
        when needed.
    setVisitInfo : `bool`, optional
        If True, attach a VisitInfo to the returned exposure when possible.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others); the
            # exposure is still usable without one, so just log and continue
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
                         " %s", e.args[0])

        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Exposure already carries WCS and metadata; use it as-is
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Plain Image: wrap it in a MaskedImage and then an Exposure
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` of `lsst.daf.base.PropertySet`
        Validated FitsStorage recipes, keyed by recipe name.

    Raises
    ------
    `RuntimeError`
        If validation fails.
    """
    # Default settings applied to any element missing from a recipe.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent: fill in all defaults from the schema
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema's type; fall back
                    # to the default when the key is absent
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def _defectLookup(self, dataId, dateKey='taiObs')
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)
def _getCcdKeyVal(self, dataId)
Utility functions.