from builtins import str

import copy
import os
import re
import weakref

import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.policy as pexPolicy
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from lsst.utils import getPackageDir

# pyfits is needed only by bypass_defects; see the TODO in the class docstring
try:
    import pyfits
except ImportError:
    pyfits = None
45 """This module defines the CameraMapper base class.""" 50 """CameraMapper is a base class for mappers that handle images from a 51 camera and products derived from them. This provides an abstraction layer 52 between the data on disk and the code. 54 Public methods: keys, queryMetadata, getDatasetTypes, map, 55 canStandardize, standardize 57 Mappers for specific data sources (e.g., CFHT Megacam, LSST 58 simulations, etc.) should inherit this class. 60 The CameraMapper manages datasets within a "root" directory. Note that 61 writing to a dataset present in the input root will hide the existing 62 dataset but not overwrite it. See #2160 for design discussion. 64 A camera is assumed to consist of one or more rafts, each composed of 65 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 66 (amps). A camera is also assumed to have a camera geometry description 67 (CameraGeom object) as a policy file, a filter description (Filter class 68 static configuration) as another policy file, and an optional defects 69 description directory. 71 Information from the camera geometry and defects are inserted into all 72 Exposure objects returned. 74 The mapper uses one or two registries to retrieve metadata about the 75 images. The first is a registry of all raw exposures. This must contain 76 the time of the observation. One or more tables (or the equivalent) 77 within the registry are used to look up data identifier components that 78 are not specified by the user (e.g. filter) and to return results for 79 metadata queries. The second is an optional registry of all calibration 80 data. This should contain validity start and end entries for each 81 calibration dataset in the same timescale as the observation time. 83 Subclasses will typically set MakeRawVisitInfoClass: 85 MakeRawVisitInfoClass: a class variable that points to a subclass of 86 MakeRawVisitInfo, a functor that creates an 87 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 89 Subclasses must provide the following methods: 91 _extractDetectorName(self, dataId): returns the detector name for a CCD 92 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 93 a dataset identifier referring to that CCD or a subcomponent of it. 95 _computeCcdExposureId(self, dataId): see below 97 _computeCoaddExposureId(self, dataId, singleFilter): see below 99 Subclasses may also need to override the following methods: 101 _transformId(self, dataId): transformation of a data identifier 102 from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"), 103 including making suitable for path expansion (e.g. removing commas). 104 The default implementation does nothing. Note that this 105 method should not modify its input parameter. 107 getShortCcdName(self, ccdName): a static method that returns a shortened name 108 suitable for use as a filename. The default version converts spaces to underscores. 110 _getCcdKeyVal(self, dataId): return a CCD key and value 111 by which to look up defects in the defects registry. 112 The default value returns ("ccd", detector name) 114 _mapActualToPath(self, template, actualId): convert a template path to an 115 actual path, using the actual dataset identifier. 117 The mapper's behaviors are largely specified by the policy file. 118 See the MapperDictionary.paf for descriptions of the available items. 120 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 121 mappings (see Mappings class). 

    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    TODO:

    * Handle defects the same way as all other calibration products, using
      the calibration registry
    * Instead of auto-loading the camera at construction time, load it from
      the calibration registry
    * Rewrite defects as AFW tables so we don't need pyfits to unpersist
      them; then remove all mention of pyfits from this package.
    """

    # Normally overridden by a camera-specific subclass of MakeRawVisitInfo
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # A class or subclass of afwCameraGeom.PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
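
    # A minimal subclass sketch (hypothetical: "MyCamMapper", "obs_mycam" and
    # its policy file are illustrative, not part of this package):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #
    #         def __init__(self, **kwargs):
    #             policyFile = dafPersist.Policy.defaultPolicyFile(
    #                 "obs_mycam", "MyCamMapper.paf", "policy")
    #             policy = dafPersist.Policy(policyFile)
    #             super(MyCamMapper, self).__init__(
    #                 policy, os.path.dirname(policyFile), **kwargs)
    #
    #         def _extractDetectorName(self, dataId):
    #             return "ccd%02d" % (dataId["ccd"],)
    #
    #         def _computeCcdExposureId(self, dataId):
    #             # pack visit and ccd into a single 64-bit integer
    #             return dataId["visit"] * 64 + dataId["ccd"]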

    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy
            Policy with per-camera defaults already merged.
            Can also be a pexPolicy.Policy, only for backward compatibility.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """
        dafPersist.Mapper.__init__(self)

        self.log = lsstLog.Log.getLogger("CameraMapper")

        if repositoryCfg is not None:
            self.root = repositoryCfg.root
        else:
            self.root = root

        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)

        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use it; otherwise try the policy;
        # otherwise the calibs are assumed to live in the regular root.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage,
                                            posixIfNoSql=(not parentRegistry))

        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at "
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None

        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and
        # the registry is loaded from the camera package, which is on the
        # local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table (if any); subclasses may fill this in
        self.filters = None

        # Verify that the class variable packageName has been set by the
        # subclass before using it
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)

    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g.,
        reading the header of an image, retrieving only the size of a catalog.

        @param policy       (Policy) Policy with per-camera defaults already merged
        @param rootStorage  (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided     (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))

        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        self.mappings = dict()

        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)

                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # for now at least, don't set up any other handling
                        # for this dataset type
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping

                    # The closures below bind the current mapping via default
                    # arguments, so each dataset type gets its own handler.
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)

                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))

                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(location.getLocationsWithRoot()[0]))

                        # Support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.makeWcs(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)])
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]), hdu=1))

                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId[key]
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)

                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog: read NAXIS2 from the first HDU
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]),
                                                         hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
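
    # With the closures above in place, a Butler client can, for a
    # hypothetical dataset type "calexp", do e.g.:
    #
    #     md = butler.get("calexp_md", visit=1, ccd=2)        # header only
    #     fn = butler.get("calexp_filename", visit=1, ccd=2)  # path on disk
    #
    # (dataset type and data ID keys here are illustrative, not defined by
    # this module)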

    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        @param dataId (dict) Data identifier with visit, ccd
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        @param dataId (dict) Data identifier with tract and patch.
        @param singleFilter (bool) True means the desired ID is for a single-
                            filter coadd, in which case dataId must contain
                            filter.
        """
        raise NotImplementedError()

    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained an
            HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """
        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
586 """Return supported keys. 587 @return (iterable) List of keys usable in a dataset identifier""" 591 """Return a dict of supported keys and their value types for a given dataset 592 type at a given level of the key hierarchy. 594 @param datasetType (str) dataset type or None for all dataset types 595 @param level (str) level or None for all levels or '' for the default level for the camera 596 @return (dict) dict keys are strings usable in a dataset identifier; values are their value types""" 602 if datasetType
is None:
603 keyDict = copy.copy(self.
keyDict)
606 if level
is not None and level
in self.
levels:
607 keyDict = copy.copy(keyDict)
608 for l
in self.
levels[level]:
623 """Return the name of the camera that this CameraMapper is for.""" 625 className = className[className.find(
'.'):-1]
626 m = re.search(
r'(\w+)Mapper', className)
628 m = re.search(
r"class '[\w.]*?(\w+)'", className)
630 return name[:1].lower() + name[1:]
if name
else '' 634 """Return the name of the package containing this CameraMapper.""" 636 raise ValueError(
'class variable packageName must not be None')
641 """Return the base directory of this package""" 645 """Map a camera dataset.""" 647 raise RuntimeError(
"No camera dataset available.")
649 return dafPersist.ButlerLocation(
650 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
652 storageName=
"ConfigStorage",
660 """Return the (preloaded) camera object. 663 raise RuntimeError(
"No camera dataset available.")
667 """Map defects dataset. 669 @return a very minimal ButlerLocation containing just the locationList field 670 (just enough information that bypass_defects can use it). 673 if defectFitsPath
is None:
674 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
676 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
681 """Return a defect based on the butler location returned by map_defects 683 @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file 684 @param[in] dataId: the usual data ID; "ccd" must be set 686 Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation 687 into an object, which is what we want for now, since that conversion is a bit tricky. 690 defectsFitsPath = butlerLocation.locationList[0]
691 with pyfits.open(defectsFitsPath)
as hduList:
692 for hdu
in hduList[1:]:
693 if hdu.header[
"name"] != detectorName:
697 for data
in hdu.data:
698 bbox = afwGeom.Box2I(
699 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
700 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
702 defectList.append(afwImage.DefectBase(bbox))
705 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))

    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)
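
    # ExposureIdInfo carries the exposure ID and the number of bits it uses,
    # e.g. an ID packed as visit*64 + ccd that fits in 32 bits yields
    # ExposureIdInfo(expId, expBits=32); downstream code can use the spare
    # bits to build unique source IDs.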
725 """Disable standardization for bfKernel 727 bfKernel is a calibration product that is numpy array, 728 unlike other calibration products that are all images; 729 all calibration images are sent through _standardizeExposure 730 due to CalibrationMapping, but we don't want that to happen to bfKernel 735 """Standardize a raw dataset by converting it to an Exposure instead of an Image""" 737 trimmed=
False, setVisitInfo=
True)
740 """Map a sky policy.""" 741 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
742 "Internal",
None,
None, self,
746 """Standardize a sky policy by returning the one we use.""" 755 def _getCcdKeyVal(self, dataId):
756 """Return CCD key and value used to look a defect in the defect registry 758 The default implementation simply returns ("ccd", full detector name) 762 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
764 """Set up a registry (usually SQLite3), trying a number of possible 772 Description of registry (for log messages) 776 Policy that contains the registry name, used if path is None. 778 Key in policy for registry path. 779 storage : Storage subclass 780 Repository Storage to look in. 781 searchParents : bool, optional 782 True if the search for a registry should follow any Butler v1 784 posixIfNoSql : bool, optional 785 If an sqlite registry is not found, will create a posix registry if 790 lsst.daf.persistence.Registry 793 if path
is None and policyKey
in policy:
794 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
795 if os.path.isabs(path):
796 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
797 if not storage.exists(path):
798 newPath = storage.instanceSearch(path)
800 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 802 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
806 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
814 if path
and (path.startswith(root)):
815 path = path[len(root +
'/'):]
816 except AttributeError:

        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        registry = None
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry

    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)

        The default implementation returns a copy of its input.

        @param dataId[in] (dict) Dataset identifier; this must not be modified
        @return (dict) Transformed dataset identifier
        """
        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier.  This implementation is usually sufficient but can be
        overridden by the subclass.

        @param template (string) Template path
        @param actualId (dict) Dataset identifier
        @return (string) Pathname
        """
        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
899 """Convert a CCD name to a form useful as a filename 901 The default implementation converts spaces to underscores. 903 return ccdName.replace(
" ",
"_")

    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        @param dataId (dict) Dataset identifier
        @return (string) Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")

    def _extractAmpId(self, dataId):
        """Extract the amplifier identifier from a dataset identifier.

        @warning this is deprecated; DO NOT USE IT

        The amplifier identifier has two parts: the detector name for the CCD
        containing the amplifier and the index of the amplifier in the
        detector.

        @param dataId (dict) Dataset identifier
        @return (tuple) Amplifier identifier
        """
        trDataId = self._transformId(dataId)
        return (trDataId["ccd"], int(trDataId['amp']))

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)
        """
        self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)

    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        """
        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
                isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        item.setFilter(afwImage.Filter(filterName))

    # Default standardization function for exposures
    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry and optionally sets
        the Filter. In both cases this saves having to persist some data in
        each exposure (or image).

        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        @param dataId (dict) Dataset identifier
        @param filter (bool) Set filter? Ignored if item is already an exposure
        @param trimmed (bool) Should detector be marked as trimmed?
        @param setVisitInfo (bool) Should Exposure have its VisitInfo filled
            out from the metadata?
        @return (lsst.afw.image.Exposure) the standardized Exposure
        """
        try:
            item = exposureFromImage(item, dataId, mapper=self, logger=self.log, setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(item, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(item, dataId, trimmed)

        if filter:
            self._setFilter(mapping, item, dataId)

        return item

    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        @param dataId (dict) Dataset identifier
        @return (string) path to the defects file or None if not available
        """
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup() returns a list of tuples of (taiObs,)
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Look up the defects for this CCD that are valid at the exposure time
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            return os.path.join(self.defectPath, rows[0][0])
        raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                           (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))

    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for
            backward compatibility)) Policy with per-camera defaults already
            merged
        @param repositoryDir (string) Policy repository for the subclassing
            module (obtained with getRepositoryPath() on the per-camera
            default dictionary)
        """
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(pexPolicy=policy)
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass,
        )
1071 """Get the registry used by this mapper. 1076 The registry used by this mapper for this mapper's repository. 1081 """Stuff image compression settings into a daf.base.PropertySet 1083 This goes into the ButlerLocation's "additionalData", which gets 1084 passed into the boost::persistence framework. 1089 Type of dataset for which to get the image compression settings. 1095 additionalData : `lsst.daf.base.PropertySet` 1096 Image compression settings. 1098 mapping = self.
mappings[datasetType]
1099 recipeName = mapping.recipe
1100 storageType = mapping.storage
1102 return lsst.daf.base.PropertySet()
1104 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1105 (datasetType, storageType, recipeName))
1106 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1107 seed = hash(tuple(dataId.items())) % 2**31
1108 for plane
in (
"image",
"mask",
"variance"):
1109 if recipe.exists(plane +
".scaling.seed")
and recipe.get(plane +
".scaling.seed") == 0:
1110 recipe.set(plane +
".scaling.seed", seed)

    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow a supplement to clobber a recipe in the defaults
            intersection = set(recipes.names()).intersection(set(supplements.names()))
            if intersection:
                raise RuntimeError("Recipes provided in %s may not override those in %s: %s" %
                                   (supplementsFile, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
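
    # An obs package can supplement these recipes through its own
    # policy/writeRecipes.yaml, e.g. (hypothetical recipe name "lossyBasic";
    # it would be selected by naming it as the recipe in the dataset policy):
    #
    #     FitsStorage:
    #       lossyBasic:
    #         image:
    #           compression:
    #             algorithm: GZIP_SHUFFLE
    #             quantizeLevel: 10.0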
1188 """Generate an Exposure from an image-like object 1190 If the image is a DecoratedImage then also set its WCS and metadata 1191 (Image and MaskedImage are missing the necessary metadata 1192 and Exposure already has those set) 1194 @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure) 1195 @return (lsst.afw.image.Exposure) Exposure containing input image 1198 if isinstance(image, afwImage.MaskedImage):
1199 exposure = afwImage.makeExposure(image)
1200 elif isinstance(image, afwImage.DecoratedImage):
1201 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1202 metadata = image.getMetadata()
1204 wcs = afwImage.makeWcs(metadata,
True)
1205 exposure.setWcs(wcs)
1206 except pexExcept.InvalidParameterError
as e:
1209 logger = lsstLog.Log.getLogger(
"CameraMapper")
1210 logger.warn(
"wcs set to None; insufficient information found in metadata to create a valid wcs: " 1213 exposure.setMetadata(metadata)
1214 elif isinstance(image, afwImage.Exposure):
1217 metadata = exposure.getMetadata()
1220 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1224 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1225 if metadata
is not None:
1228 logger = lsstLog.Log.getLogger(
"CameraMapper")
1229 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1231 exposureId = mapper._computeCcdExposureId(dataId)
1232 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1234 exposure.getInfo().setVisitInfo(visitInfo)
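

# A minimal usage sketch (assumes `decoratedImage` was read from a raw FITS
# file and `aMapper` is a CameraMapper subclass instance; both names are
# illustrative):
#
#     exposure = exposureFromImage(decoratedImage,
#                                  dataId={"visit": 1, "ccd": 2},
#                                  mapper=aMapper)
#     assert exposure.getInfo().getVisitInfo() is not None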
1240 """Validate recipes for FitsStorage 1242 The recipes are supplemented with default values where appropriate. 1244 TODO: replace this custom validation code with Cerberus (DM-11846) 1248 recipes : `lsst.daf.persistence.Policy` 1249 FitsStorage recipes to validate. 1253 validated : `lsst.daf.base.PropertySet` 1254 Validated FitsStorage recipe. 1259 If validation fails. 1263 compressionSchema = {
1264 "algorithm":
"NONE",
1267 "quantizeLevel": 0.0,
1270 "algorithm":
"NONE",
1272 "maskPlanes": [
"NO_DATA"],
1274 "quantizeLevel": 4.0,
1281 def checkUnrecognized(entry, allowed, description):
1282 """Check to see if the entry contains unrecognised keywords""" 1283 unrecognized = set(entry.keys()) - set(allowed)
1286 "Unrecognized entries when parsing image compression recipe %s: %s" %
1287 (description, unrecognized))
1290 for name
in recipes.names(
True):
1291 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1292 rr = dafBase.PropertySet()
1293 validated[name] = rr
1294 for plane
in (
"image",
"mask",
"variance"):
1295 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1296 name +
"->" + plane)
1298 for settings, schema
in ((
"compression", compressionSchema),
1299 (
"scaling", scalingSchema)):
1300 prefix = plane +
"." + settings
1301 if settings
not in recipes[name][plane]:
1303 rr.set(prefix +
"." + key, schema[key])
1305 entry = recipes[name][plane][settings]
1306 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1308 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1309 rr.set(prefix +
"." + key, value)