from builtins import str

# Standard-library modules used below (copy.copy, os.path, re.search,
# weakref.proxy).
import copy
import os
import re
import weakref

import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.policy as pexPolicy
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
from lsst.utils import getPackageDir
43 """This module defines the CameraMapper base class.""" 48 """CameraMapper is a base class for mappers that handle images from a 49 camera and products derived from them. This provides an abstraction layer 50 between the data on disk and the code. 52 Public methods: keys, queryMetadata, getDatasetTypes, map, 53 canStandardize, standardize 55 Mappers for specific data sources (e.g., CFHT Megacam, LSST 56 simulations, etc.) should inherit this class. 58 The CameraMapper manages datasets within a "root" directory. Note that 59 writing to a dataset present in the input root will hide the existing 60 dataset but not overwrite it. See #2160 for design discussion. 62 A camera is assumed to consist of one or more rafts, each composed of 63 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 64 (amps). A camera is also assumed to have a camera geometry description 65 (CameraGeom object) as a policy file, a filter description (Filter class 66 static configuration) as another policy file, and an optional defects 67 description directory. 69 Information from the camera geometry and defects are inserted into all 70 Exposure objects returned. 72 The mapper uses one or two registries to retrieve metadata about the 73 images. The first is a registry of all raw exposures. This must contain 74 the time of the observation. One or more tables (or the equivalent) 75 within the registry are used to look up data identifier components that 76 are not specified by the user (e.g. filter) and to return results for 77 metadata queries. The second is an optional registry of all calibration 78 data. This should contain validity start and end entries for each 79 calibration dataset in the same timescale as the observation time. 81 Subclasses will typically set MakeRawVisitInfoClass: 83 MakeRawVisitInfoClass: a class variable that points to a subclass of 84 MakeRawVisitInfo, a functor that creates an 85 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 87 Subclasses must provide the following methods: 89 _extractDetectorName(self, dataId): returns the detector name for a CCD 90 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 91 a dataset identifier referring to that CCD or a subcomponent of it. 93 _computeCcdExposureId(self, dataId): see below 95 _computeCoaddExposureId(self, dataId, singleFilter): see below 97 Subclasses may also need to override the following methods: 99 _transformId(self, dataId): transformation of a data identifier 100 from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"), 101 including making suitable for path expansion (e.g. removing commas). 102 The default implementation does nothing. Note that this 103 method should not modify its input parameter. 105 getShortCcdName(self, ccdName): a static method that returns a shortened name 106 suitable for use as a filename. The default version converts spaces to underscores. 108 _getCcdKeyVal(self, dataId): return a CCD key and value 109 by which to look up defects in the defects registry. 110 The default value returns ("ccd", detector name) 112 _mapActualToPath(self, template, actualId): convert a template path to an 113 actual path, using the actual dataset identifier. 115 The mapper's behaviors are largely specified by the policy file. 116 See the MapperDictionary.paf for descriptions of the available items. 118 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 119 mappings (see Mappings class). 
    Common default mappings for all subclasses can be specified in the
    "policy/{images,exposures,calibrations,datasets}.yaml" files. This
    provides a simple way to add a product to all camera mappers.

    Functions to map (provide a path to the data given a dataset
    identifier dictionary) and standardize (convert data into some standard
    format or type) may be provided in the subclass as "map_{dataset type}"
    and "std_{dataset type}", respectively.

    If non-Exposure datasets cannot be retrieved using standard
    daf_persistence methods alone, a "bypass_{dataset type}" function may be
    provided in the subclass to return the dataset instead of using the
    "datasets" subpolicy.

    Implementations of map_camera and bypass_camera that should typically be
    sufficient are provided in this base class.

    TODO:

    * Handle defects the same way as all other calibration products, using
      the calibration registry
    * Instead of auto-loading the camera at construction time, load it from
      the calibration registry
    * Rewrite defects as AFW tables so we don't need pyfits to unpersist
      them; then remove all mention of pyfits from this package.
    """
    packageName = None

    # A class or subclass of MakeRawVisitInfo, a functor that makes an
    # lsst.afw.image.VisitInfo from the FITS metadata of a raw image.
    MakeRawVisitInfoClass = MakeRawVisitInfo

    # A class or subclass of PupilFactory.
    PupilFactoryClass = afwCameraGeom.PupilFactory
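    # A hedged sketch of a minimal concrete subclass (the names
    # "MyCamMapper", "obs_mycam" and "MakeMyCamRawVisitInfo" are
    # illustrative assumptions, not a real obs_* package):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #         MakeRawVisitInfoClass = MakeMyCamRawVisitInfo
    #
    #         def _extractDetectorName(self, dataId):
    #             return "R:%(raft)s S:%(sensor)s" % dataId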
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)
        self.log = lsstLog.Log.getLogger("CameraMapper")

        if root:
            self.root = root
        elif repositoryCfg:
            self.root = repositoryCfg.root
        else:
            self.root = None

        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        # Merge the per-camera policy onto the dictionary of defaults
        # shipped with obs_base.
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)
        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']
        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # If the calibRoot is passed in, use that. If not and it's indicated
        # in the policy, use that. Otherwise the calibs are in the regular
        # root. If the location indicated by the calib root does not exist,
        # do not create it.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry,
                                            policy, "registryPath",
                                            self.rootStorage,
                                            searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        if not self.registry:
            self.registry = parentRegistry
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry(
                    "calibRegistry", "calib", calibRegistry, policy,
                    "calibRegistryPath", calibStorage)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None
        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Defect registry and root. Defects are stored with the camera and
        # the registry is loaded from the camera package, which is on the
        # local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # Filter translation table
        self.filters = None

        # Verify that the class variable packageName is set before
        # attempting to instantiate an instance.
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
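    # A hedged sketch of how a subclass typically constructs itself: load
    # the camera-specific policy file and forward it here (the package and
    # file names are illustrative):
    #
    #     def __init__(self, inputPolicy=None, **kwargs):
    #         policyFile = dafPersist.Policy.defaultPolicyFile(
    #             "obs_mycam", "MyCamMapper.yaml", "policy")
    #         policy = dafPersist.Policy(policyFile)
    #         super(MyCamMapper, self).__init__(
    #             policy, os.path.dirname(policyFile), **kwargs)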
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g.,
        reading the header of an image, retrieving only the size of a
        catalog.

        @param policy       (Policy) Policy with per-camera defaults already merged
        @param rootStorage  (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided     (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))
        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))
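                # Each dataset entry merged here is a subpolicy shaped like
                # the following YAML sketch (the "calexp" name and template
                # are illustrative; the keys come from the mapping
                # dictionaries loaded above):
                #
                #     calexp:
                #         template: "calexp/v%(visit)d/c%(ccd)02d.fits"
                #         python: "lsst.afw.image.ExposureF"
                #         persistable: "ExposureF"
                #         storage: "FitsStorage"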
                mappings = dict()
                setattr(self, name, mappings)
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets assemble their components via a
                    # ButlerComposite rather than mapping to a file.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # For now, don't set up any other handling for this
                        # dataset type.
                        continue
                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)
                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p) for p in location.getLocations()])

                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(location.getLocationsWithRoot()[0]))
                        # Add support for configuring FITS compression
                        addName = "add_" + datasetType
                        if not hasattr(self, addName):
                            setattr(self, addName, self.getImageCompressionSettings)

                        if name == "exposures":
                            setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.makeWcs(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Calib(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("visitInfo",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.VisitInfo(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("filter",
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwImage.Filter(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                            setMethods("detector",
                                       mapImpl=lambda dataId, write=False:
                                           dafPersist.ButlerLocation(
                                               pythonType="lsst.afw.cameraGeom.CameraConfig",
                                               cppType="Config",
                                               storageName="Internal",
                                               locationList="ignored",
                                               dataId=dataId,
                                               mapper=self,
                                               storage=None,
                                           ),
                                       bypassImpl=lambda datasetType, pythonType, location, dataId:
                                           self.camera[self._extractDetectorName(dataId)]
                                       )
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
                        elif name == "images":
                            setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                       afwImage.bboxFromMetadata(
                                           afwImage.readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]), hdu=1))
                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]),
                                                         hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
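                        # Usage sketch for the derived catalog datasets
                        # (assumes a "src" catalog dataset is configured;
                        # dataId values are illustrative):
                        #
                        #     nRows = butler.get("src_len", visit=1234, ccd=56)
                        #     schema = butler.get("src_schema", immediate=True)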
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override.

        @param dataId (dict) Data identifier with visit, ccd
        """
        raise NotImplementedError()
    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override.

        @param dataId (dict) Data identifier with tract and patch.
        @param singleFilter (bool) True means the desired ID is for a
            single-filter coadd, in which case dataId must contain filter.
        """
        raise NotImplementedError()
    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
584 """Return supported keys. 585 @return (iterable) List of keys usable in a dataset identifier""" 589 """Return a dict of supported keys and their value types for a given dataset 590 type at a given level of the key hierarchy. 592 @param datasetType (str) dataset type or None for all dataset types 593 @param level (str) level or None for all levels or '' for the default level for the camera 594 @return (dict) dict keys are strings usable in a dataset identifier; values are their value types""" 600 if datasetType
is None:
601 keyDict = copy.copy(self.
keyDict)
604 if level
is not None and level
in self.
levels:
605 keyDict = copy.copy(keyDict)
606 for l
in self.
levels[level]:
621 """Return the name of the camera that this CameraMapper is for.""" 623 className = className[className.find(
'.'):-1]
624 m = re.search(
r'(\w+)Mapper', className)
626 m = re.search(
r"class '[\w.]*?(\w+)'", className)
628 return name[:1].lower() + name[1:]
if name
else '' 632 """Return the name of the package containing this CameraMapper.""" 634 raise ValueError(
'class variable packageName must not be None')
639 """Return the base directory of this package""" 643 """Map a camera dataset.""" 645 raise RuntimeError(
"No camera dataset available.")
647 return dafPersist.ButlerLocation(
648 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
650 storageName=
"ConfigStorage",
658 """Return the (preloaded) camera object. 661 raise RuntimeError(
"No camera dataset available.")
665 """Map defects dataset. 667 @return a very minimal ButlerLocation containing just the locationList field 668 (just enough information that bypass_defects can use it). 671 if defectFitsPath
is None:
672 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
674 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
679 """Return a defect based on the butler location returned by map_defects 681 @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file 682 @param[in] dataId: the usual data ID; "ccd" must be set 684 Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation 685 into an object, which is what we want for now, since that conversion is a bit tricky. 688 defectsFitsPath = butlerLocation.locationList[0]
689 with pyfits.open(defectsFitsPath)
as hduList:
690 for hdu
in hduList[1:]:
691 if hdu.header[
"name"] != detectorName:
695 for data
in hdu.data:
696 bbox = afwGeom.Box2I(
697 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
698 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
700 defectList.append(afwImage.DefectBase(bbox))
703 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)
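    # Usage sketch: the returned ExposureIdInfo is typically used to seed
    # source IDs (dataId values are illustrative):
    #
    #     expIdInfo = butler.get("expIdInfo", visit=1234, ccd=56)
    #     idFactory = afwTable.IdFactory.makeSource(expIdInfo.expId,
    #                                               expIdInfo.unusedBits)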
723 """Disable standardization for bfKernel 725 bfKernel is a calibration product that is numpy array, 726 unlike other calibration products that are all images; 727 all calibration images are sent through _standardizeExposure 728 due to CalibrationMapping, but we don't want that to happen to bfKernel 733 """Standardize a raw dataset by converting it to an Exposure instead of an Image""" 735 trimmed=
False, setVisitInfo=
True)
738 """Map a sky policy.""" 739 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
740 "Internal",
None,
None, self,
744 """Standardize a sky policy by returning the one we use.""" 753 def _getCcdKeyVal(self, dataId):
754 """Return CCD key and value used to look a defect in the defect registry 756 The default implementation simply returns ("ccd", full detector name) 760 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
762 """Set up a registry (usually SQLite3), trying a number of possible 770 Description of registry (for log messages) 774 Policy that contains the registry name, used if path is None. 776 Key in policy for registry path. 777 storage : Storage subclass 778 Repository Storage to look in. 779 searchParents : bool, optional 780 True if the search for a registry should follow any Butler v1 782 posixIfNoSql : bool, optional 783 If an sqlite registry is not found, will create a posix registry if 788 lsst.daf.persistence.Registry 791 if path
is None and policyKey
in policy:
792 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
793 if os.path.isabs(path):
794 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
795 if not storage.exists(path):
796 newPath = storage.instanceSearch(path)
798 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 800 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
804 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
812 if path
and (path.startswith(root)):
813 path = path[len(root +
'/'):]
814 except AttributeError:
820 def search(filename, description):
821 """Search for file in storage 826 Filename to search for 828 Description of file, for error message. 832 path : `str` or `None` 833 Path to file, or None 835 result = storage.instanceSearch(filename)
838 self.
log.debug(
"Unable to locate %s: %s", description, filename)
843 path = search(
"%s.pgsql" % name,
"%s in root" % description)
845 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
847 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
850 if not storage.exists(path):
851 newPath = storage.instanceSearch(path)
852 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 853 if newPath
is not None:
855 localFileObj = storage.getLocalFile(path)
856 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
857 registry = dafPersist.Registry.create(localFileObj.name)
859 elif not registry
and posixIfNoSql:
861 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
862 registry = dafPersist.PosixRegistry(storage.root)
    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        @param dataId[in] (dict) Dataset identifier; this must not be modified
        @return (dict) Transformed dataset identifier
        """
        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier. This implementation is usually sufficient but can be
        overridden by the subclass.

        @param template (string) Template path
        @param actualId (dict) Dataset identifier
        @return (string) Pathname
        """
        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
897 """Convert a CCD name to a form useful as a filename 899 The default implementation converts spaces to underscores. 901 return ccdName.replace(
" ",
"_")
903 def _extractDetectorName(self, dataId):
904 """Extract the detector (CCD) name from the dataset identifier. 906 The name in question is the detector name used by lsst.afw.cameraGeom. 908 @param dataId (dict) Dataset identifier 909 @return (string) Detector name 911 raise NotImplementedError(
"No _extractDetectorName() function specified")
913 def _extractAmpId(self, dataId):
914 """Extract the amplifier identifer from a dataset identifier. 916 @warning this is deprecated; DO NOT USE IT 918 amplifier identifier has two parts: the detector name for the CCD 919 containing the amplifier and index of the amplifier in the detector. 920 @param dataId (dict) Dataset identifer 921 @return (tuple) Amplifier identifier""" 924 return (trDataId[
"ccd"], int(trDataId[
'amp']))
    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)
        """
        self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)
    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.

        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        """

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
                isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        item.setFilter(afwImage.Filter(filterName))
    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        @param dataId (dict) Dataset identifier
        @param filter (bool) Set filter? Ignored if item is already an
            exposure
        @param trimmed (bool) Should detector be marked as trimmed?
        @param setVisitInfo (bool) Should Exposure have its VisitInfo filled
            out from the metadata?
        @return (lsst.afw.image.Exposure) the standardized Exposure
        """
        try:
            item = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                     setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(item, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(item, dataId, trimmed)

        if filter:
            self._setFilter(mapping, item, dataId)

        return item
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        @param dataId (dict) Dataset identifier
        @return (string) path to the defects file or None if not available
        """
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup will fail in a posix registry because there is no template
        # to provide.
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Look up the defects for this CCD that are valid at the
        # observation time.
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for
            backward compatibility)) Policy with per-camera defaults already
            merged
        @param repositoryDir (string) Policy repository for the subclassing
            module (obtained with getRepositoryPath() on the per-camera
            default dictionary)
        """
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(pexPolicy=policy)
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )
1069 """Get the registry used by this mapper. 1074 The registry used by this mapper for this mapper's repository. 1079 """Stuff image compression settings into a daf.base.PropertySet 1081 This goes into the ButlerLocation's "additionalData", which gets 1082 passed into the boost::persistence framework. 1087 Type of dataset for which to get the image compression settings. 1093 additionalData : `lsst.daf.base.PropertySet` 1094 Image compression settings. 1096 mapping = self.
mappings[datasetType]
1097 recipeName = mapping.recipe
1098 storageType = mapping.storage
1100 return dafBase.PropertySet()
1102 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1103 (datasetType, storageType, recipeName))
1104 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1105 seed = hash(tuple(dataId.items())) % 2**31
1106 for plane
in (
"image",
"mask",
"variance"):
1107 if recipe.exists(plane +
".scaling.seed")
and recipe.get(plane +
".scaling.seed") == 0:
1108 recipe.set(plane +
".scaling.seed", seed)
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe (for the ``FitsStorage`` storage
        type):

            default:
              image: &default
                compression:
                  algorithm: GZIP_SHUFFLE
              mask: *default
              variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow the package's supplements to override the defaults.
            intersection = set(recipes.names()).intersection(set(supplements.names()))
            if intersection:
                raise RuntimeError("Recipes provided in %s may not override those in %s: %s" %
                                   (supplementsFile, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1186 """Generate an Exposure from an image-like object 1188 If the image is a DecoratedImage then also set its WCS and metadata 1189 (Image and MaskedImage are missing the necessary metadata 1190 and Exposure already has those set) 1192 @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure) 1193 @return (lsst.afw.image.Exposure) Exposure containing input image 1196 if isinstance(image, afwImage.MaskedImage):
1197 exposure = afwImage.makeExposure(image)
1198 elif isinstance(image, afwImage.DecoratedImage):
1199 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1200 metadata = image.getMetadata()
1202 wcs = afwImage.makeWcs(metadata,
True)
1203 exposure.setWcs(wcs)
1204 except pexExcept.InvalidParameterError
as e:
1207 logger = lsstLog.Log.getLogger(
"CameraMapper")
1208 logger.warn(
"wcs set to None; insufficient information found in metadata to create a valid wcs: " 1211 exposure.setMetadata(metadata)
1212 elif isinstance(image, afwImage.Exposure):
1215 metadata = exposure.getMetadata()
1218 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1222 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1223 if metadata
is not None:
1226 logger = lsstLog.Log.getLogger(
"CameraMapper")
1227 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1229 exposureId = mapper._computeCcdExposureId(dataId)
1230 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1232 exposure.getInfo().setVisitInfo(visitInfo)
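# Usage sketch for exposureFromImage (the file name is illustrative):
#
#     decoratedImage = afwImage.DecoratedImageF("raw-1234-56.fits")
#     exposure = exposureFromImage(decoratedImage)
#     # exposure is an afwImage Exposure with WCS and metadata attached
#     # when the FITS header contains enough information to build them.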
1238 """Validate recipes for FitsStorage 1240 The recipes are supplemented with default values where appropriate. 1242 TODO: replace this custom validation code with Cerberus (DM-11846) 1246 recipes : `lsst.daf.persistence.Policy` 1247 FitsStorage recipes to validate. 1251 validated : `lsst.daf.base.PropertySet` 1252 Validated FitsStorage recipe. 1257 If validation fails. 1261 compressionSchema = {
1262 "algorithm":
"NONE",
1265 "quantizeLevel": 0.0,
1268 "algorithm":
"NONE",
1270 "maskPlanes": [
"NO_DATA"],
1272 "quantizeLevel": 4.0,
1279 def checkUnrecognized(entry, allowed, description):
1280 """Check to see if the entry contains unrecognised keywords""" 1281 unrecognized = set(entry.keys()) - set(allowed)
1284 "Unrecognized entries when parsing image compression recipe %s: %s" %
1285 (description, unrecognized))
1288 for name
in recipes.names(
True):
1289 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1290 rr = dafBase.PropertySet()
1291 validated[name] = rr
1292 for plane
in (
"image",
"mask",
"variance"):
1293 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1294 name +
"->" + plane)
1296 for settings, schema
in ((
"compression", compressionSchema),
1297 (
"scaling", scalingSchema)):
1298 prefix = plane +
"." + settings
1299 if settings
not in recipes[name][plane]:
1301 rr.set(prefix +
"." + key, schema[key])
1303 entry = recipes[name][plane][settings]
1304 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1306 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1307 rr.set(prefix +
"." + key, value)