from builtins import str

import copy
import os
import re
import weakref

import pyfits  # needed by bypass_defects until defects are written as afw tables

from lsst.utils import getPackageDir
import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.policy as pexPolicy
import lsst.pex.exceptions as pexExcept
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
43 """This module defines the CameraMapper base class.""" 48 """CameraMapper is a base class for mappers that handle images from a 49 camera and products derived from them. This provides an abstraction layer 50 between the data on disk and the code. 52 Public methods: keys, queryMetadata, getDatasetTypes, map, 53 canStandardize, standardize 55 Mappers for specific data sources (e.g., CFHT Megacam, LSST 56 simulations, etc.) should inherit this class. 58 The CameraMapper manages datasets within a "root" directory. Note that 59 writing to a dataset present in the input root will hide the existing 60 dataset but not overwrite it. See #2160 for design discussion. 62 A camera is assumed to consist of one or more rafts, each composed of 63 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 64 (amps). A camera is also assumed to have a camera geometry description 65 (CameraGeom object) as a policy file, a filter description (Filter class 66 static configuration) as another policy file, and an optional defects 67 description directory. 69 Information from the camera geometry and defects are inserted into all 70 Exposure objects returned. 72 The mapper uses one or two registries to retrieve metadata about the 73 images. The first is a registry of all raw exposures. This must contain 74 the time of the observation. One or more tables (or the equivalent) 75 within the registry are used to look up data identifier components that 76 are not specified by the user (e.g. filter) and to return results for 77 metadata queries. The second is an optional registry of all calibration 78 data. This should contain validity start and end entries for each 79 calibration dataset in the same timescale as the observation time. 81 Subclasses will typically set MakeRawVisitInfoClass: 83 MakeRawVisitInfoClass: a class variable that points to a subclass of 84 MakeRawVisitInfo, a functor that creates an 85 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 87 Subclasses must provide the following methods: 89 _extractDetectorName(self, dataId): returns the detector name for a CCD 90 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 91 a dataset identifier referring to that CCD or a subcomponent of it. 93 _computeCcdExposureId(self, dataId): see below 95 _computeCoaddExposureId(self, dataId, singleFilter): see below 97 Subclasses may also need to override the following methods: 99 _transformId(self, dataId): transformation of a data identifier 100 from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"), 101 including making suitable for path expansion (e.g. removing commas). 102 The default implementation does nothing. Note that this 103 method should not modify its input parameter. 105 getShortCcdName(self, ccdName): a static method that returns a shortened name 106 suitable for use as a filename. The default version converts spaces to underscores. 108 _getCcdKeyVal(self, dataId): return a CCD key and value 109 by which to look up defects in the defects registry. 110 The default value returns ("ccd", detector name) 112 _mapActualToPath(self, template, actualId): convert a template path to an 113 actual path, using the actual dataset identifier. 115 The mapper's behaviors are largely specified by the policy file. 116 See the MapperDictionary.paf for descriptions of the available items. 118 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 119 mappings (see Mappings class). 
121 Common default mappings for all subclasses can be specified in the 122 "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides 123 a simple way to add a product to all camera mappers. 125 Functions to map (provide a path to the data given a dataset 126 identifier dictionary) and standardize (convert data into some standard 127 format or type) may be provided in the subclass as "map_{dataset type}" 128 and "std_{dataset type}", respectively. 130 If non-Exposure datasets cannot be retrieved using standard 131 daf_persistence methods alone, a "bypass_{dataset type}" function may be 132 provided in the subclass to return the dataset instead of using the 133 "datasets" subpolicy. 135 Implementations of map_camera and bypass_camera that should typically be 136 sufficient are provided in this base class. 139 * Handle defects the same was as all other calibration products, using the calibration registry 140 * Instead of auto-loading the camera at construction time, load it from the calibration registry 141 * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention 142 of pyfits from this package. 148 MakeRawVisitInfoClass = MakeRawVisitInfo
    # a class or subclass of PupilFactory
    PupilFactoryClass = afwCameraGeom.PupilFactory
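    # A minimal subclass sketch (the camera, package name, and ID layout below
    # are hypothetical, for illustration only):
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #
    #         def _extractDetectorName(self, dataId):
    #             return "R:%(raft)s S:%(sensor)s" % dataId
    #
    #         def _computeCcdExposureId(self, dataId):
    #             # e.g. assuming at most 200 CCDs per visit
    #             return 200*int(dataId['visit']) + int(dataId['ccd'])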
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Can also be pexPolicy.Policy, only for backward compatibility.
            Policy with per-camera defaults already merged.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """
        dafPersist.Mapper.__init__(self)
        self.log = lsstLog.Log.getLogger("CameraMapper")

        # ...
        if repositoryCfg is not None:
            self.root = repositoryCfg.root
        # ...

        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)

        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)
        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']

        # Root directories
        if root is None:
            root = "."
        root = dafPersist.LogicalLocation(root).locString()

        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry(
            "registry", "exposure", registry, policy, "registryPath", self.rootStorage,
            searchParents=False, posixIfNoSql=(not parentRegistry))
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry(
                    "calibRegistry", "calib", calibRegistry, policy,
                    "calibRegistryPath", calibStorage,
                    posixIfNoSql=False)  # NB never use posix for calibs
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Defect registry and root. Defects are stored with the camera and the
        # registry is loaded from the camera package on the local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)

        # ...
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:
        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g.,
        reading the header of an image, retrieving only the size of a catalog.

        @param policy       (Policy) Policy with per-camera defaults already merged
        @param rootStorage  (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided     (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))
        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name in policy:
                datasets = policy[name]

                # Centrally-defined datasets
                defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
                if os.path.exists(defaultsPath):
                    datasets.merge(dafPersist.Policy(defaultsPath))

                mappings = dict()
                setattr(self, name, mappings)
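                # For orientation, a dataset entry that would flow through the
                # loop below might look like the following YAML (dataset type
                # and template are hypothetical, not shipped with obs_base):
                #
                #     calexp:
                #         template: "calexp/v%(visit)d_f%(filter)s.fits"
                #         python: lsst.afw.image.ExposureF
                #         persistable: ExposureF
                #         storage: FitsStorage
                #         level: Ccd
                #         tables: raw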
                for datasetType in datasets.names(True):
                    subPolicy = datasets[datasetType]
                    subPolicy.merge(defPolicy)

                    # Composite datasets are handled by a closure over the
                    # subpolicy rather than by a Mapping object.
                    if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                        def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                             subPolicy=subPolicy):
                            components = subPolicy.get('composite')
                            assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                            disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                            python = subPolicy['python']
                            butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                         disassembler=disassembler,
                                                                         python=python,
                                                                         dataId=dataId,
                                                                         mapper=self)
                            for name, component in components.items():
                                butlerComposite.add(id=name,
                                                    datasetType=component.get('datasetType'),
                                                    setter=component.get('setter', None),
                                                    getter=component.get('getter', None),
                                                    subset=component.get('subset', False),
                                                    inputOnly=component.get('inputOnly', False))
                            return butlerComposite
                        setattr(self, "map_" + datasetType, compositeClosure)
                        # No other handling is needed for a composite dataset.
                        continue

                    if name == "calibrations":
                        mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry,
                                      calibStorage, provided=provided, dataRoot=rootStorage)
                    else:
                        mapping = cls(datasetType, subPolicy, self.registry, rootStorage,
                                      provided=provided)
                    self.keyDict.update(mapping.keys())
                    mappings[datasetType] = mapping
                    self.mappings[datasetType] = mapping
                    if not hasattr(self, "map_" + datasetType):
                        def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.map(mapper, dataId, write)
                        setattr(self, "map_" + datasetType, mapClosure)
                    if not hasattr(self, "query_" + datasetType):
                        def queryClosure(format, dataId, mapping=mapping):
                            return mapping.lookup(format, dataId)
                        setattr(self, "query_" + datasetType, queryClosure)
                    if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                        def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                            return mapping.standardize(mapper, item, dataId)
                        setattr(self, "std_" + datasetType, stdClosure)
                    def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                        """Set convenience methods on CameraMapper"""
                        mapName = "map_" + datasetType + "_" + suffix
                        bypassName = "bypass_" + datasetType + "_" + suffix
                        queryName = "query_" + datasetType + "_" + suffix
                        if not hasattr(self, mapName):
                            setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                        if not hasattr(self, bypassName):
                            if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                                bypassImpl = getattr(self, "bypass_" + datasetType)
                            if bypassImpl is not None:
                                setattr(self, bypassName, bypassImpl)
                        if not hasattr(self, queryName):
                            setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
                    # Filename of dataset
                    setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               [os.path.join(location.getStorage().root, p)
                                for p in location.getLocations()])
                    # Metadata from FITS file
                    if subPolicy["storage"] == "FitsStorage":  # a FITS image
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(location.getLocationsWithRoot()[0]))
                    # "add_" method to stuff additional data (e.g. image
                    # compression settings) into the ButlerLocation
                    addName = "add_" + datasetType
                    if not hasattr(self, addName):
                        setattr(self, addName,
                                lambda dataId, datasetType=datasetType:
                                self.getImageCompressionSettings(datasetType, dataId))

                    if name == "exposures":
                        setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.makeWcs(
                                       afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.Calib(
                                       afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("visitInfo",
                                   bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.VisitInfo(
                                       afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("filter",
                                   bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.Filter(
                                       afwImage.readMetadata(location.getLocationsWithRoot()[0])))
                        setMethods("detector",
                                   mapImpl=lambda dataId, write=False:
                                   dafPersist.ButlerLocation(
                                       pythonType="lsst.afw.cameraGeom.CameraConfig",
                                       cppType="Config",
                                       storageName="Internal",
                                       locationList="ignored",
                                       dataId=dataId,
                                       mapper=self,
                                       storage=None,
                                   ),
                                   bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   self.camera[self._extractDetectorName(dataId)]
                                   )
                        setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                   afwImage.bboxFromMetadata(
                                       afwImage.readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
                    elif name == "images":
                        setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                                   afwImage.bboxFromMetadata(
                                       afwImage.readMetadata(location.getLocationsWithRoot()[0])))

                    if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                        setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]), hdu=1))
                    # Sub-images
                    if subPolicy["storage"] == "FitsStorage":
                        def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                            subId = dataId.copy()
                            del subId['bbox']
                            loc = mapping.map(mapper, subId, write)
                            bbox = dataId['bbox']
                            llcX = bbox.getMinX()
                            llcY = bbox.getMinY()
                            width = bbox.getWidth()
                            height = bbox.getHeight()
                            loc.additionalData.set('llcX', llcX)
                            loc.additionalData.set('llcY', llcY)
                            loc.additionalData.set('width', width)
                            loc.additionalData.set('height', height)
                            if 'imageOrigin' in dataId:
                                loc.additionalData.set('imageOrigin',
                                                       dataId['imageOrigin'])
                            return loc

                        def querySubClosure(key, format, dataId, mapping=mapping):
                            subId = dataId.copy()
                            del subId[key]
                            return mapping.lookup(format, subId)
                        setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
                    if subPolicy["storage"] == "FitsCatalogStorage":
                        # Length of catalog
                        setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwImage.readMetadata(os.path.join(location.getStorage().root,
                                                                      location.getLocations()[0]),
                                                         hdu=1).get("NAXIS2"))

                        # Schema of catalog
                        if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                            setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                       afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                             location.getLocations()[0])))
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        @param dataId (dict) Data identifier with visit, ccd
        """
        raise NotImplementedError()
    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        @param dataId (dict) Data identifier with tract and patch.
        @param singleFilter (bool) True means the desired ID is for a single-
                                   filter coadd, in which case dataId must
                                   contain filter.
        """
        raise NotImplementedError()
    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'. The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository. Will return None if
            the object can't be found. If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """

        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n),
                                                  searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
585 """Return supported keys. 586 @return (iterable) List of keys usable in a dataset identifier""" 590 """Return a dict of supported keys and their value types for a given dataset 591 type at a given level of the key hierarchy. 593 @param datasetType (str) dataset type or None for all dataset types 594 @param level (str) level or None for all levels or '' for the default level for the camera 595 @return (dict) dict keys are strings usable in a dataset identifier; values are their value types""" 601 if datasetType
is None:
602 keyDict = copy.copy(self.
keyDict)
605 if level
is not None and level
in self.
levels:
606 keyDict = copy.copy(keyDict)
607 for l
in self.
levels[level]:
622 """Return the name of the camera that this CameraMapper is for.""" 624 className = className[className.find(
'.'):-1]
625 m = re.search(
r'(\w+)Mapper', className)
627 m = re.search(
r"class '[\w.]*?(\w+)'", className)
629 return name[:1].lower() + name[1:]
if name
else '' 633 """Return the name of the package containing this CameraMapper.""" 635 raise ValueError(
'class variable packageName must not be None')
640 """Return the base directory of this package""" 644 """Map a camera dataset.""" 646 raise RuntimeError(
"No camera dataset available.")
648 return dafPersist.ButlerLocation(
649 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
651 storageName=
"ConfigStorage",
659 """Return the (preloaded) camera object. 662 raise RuntimeError(
"No camera dataset available.")
666 """Map defects dataset. 668 @return a very minimal ButlerLocation containing just the locationList field 669 (just enough information that bypass_defects can use it). 672 if defectFitsPath
is None:
673 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
675 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
680 """Return a defect based on the butler location returned by map_defects 682 @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file 683 @param[in] dataId: the usual data ID; "ccd" must be set 685 Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation 686 into an object, which is what we want for now, since that conversion is a bit tricky. 689 defectsFitsPath = butlerLocation.locationList[0]
690 with pyfits.open(defectsFitsPath)
as hduList:
691 for hdu
in hduList[1:]:
692 if hdu.header[
"name"] != detectorName:
696 for data
in hdu.data:
697 bbox = afwGeom.Box2I(
698 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
699 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
701 defectList.append(afwImage.DefectBase(bbox))
704 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
    def map_expIdInfo(self, dataId, write=False):
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)
724 """Disable standardization for bfKernel 726 bfKernel is a calibration product that is numpy array, 727 unlike other calibration products that are all images; 728 all calibration images are sent through _standardizeExposure 729 due to CalibrationMapping, but we don't want that to happen to bfKernel 734 """Standardize a raw dataset by converting it to an Exposure instead of an Image""" 736 trimmed=
False, setVisitInfo=
True)
739 """Map a sky policy.""" 740 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
741 "Internal",
None,
None, self,
745 """Standardize a sky policy by returning the one we use.""" 754 def _getCcdKeyVal(self, dataId):
755 """Return CCD key and value used to look a defect in the defect registry 757 The default implementation simply returns ("ccd", full detector name) 761 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
763 """Set up a registry (usually SQLite3), trying a number of possible 771 Description of registry (for log messages) 775 Policy that contains the registry name, used if path is None. 777 Key in policy for registry path. 778 storage : Storage subclass 779 Repository Storage to look in. 780 searchParents : bool, optional 781 True if the search for a registry should follow any Butler v1 783 posixIfNoSql : bool, optional 784 If an sqlite registry is not found, will create a posix registry if 789 lsst.daf.persistence.Registry 792 if path
is None and policyKey
in policy:
793 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
794 if os.path.isabs(path):
795 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
796 if not storage.exists(path):
797 newPath = storage.instanceSearch(path)
799 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 801 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
805 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
813 if path
and (path.startswith(root)):
814 path = path[len(root +
'/'):]
815 except AttributeError:
        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        registry = None
        path = search("%s.pgsql" % name, "%s in root" % description)
        if not path:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if not path:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
        elif not registry and posixIfNoSql:
            try:
                self.log.info("Loading Posix %s registry from %s", description, storage.root)
                registry = dafPersist.PosixRegistry(storage.root)
            except Exception:
                registry = None

        return registry
    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        @param dataId[in] (dict) Dataset identifier; this must not be modified
        @return (dict) Transformed dataset identifier"""
        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
883 """Convert a template path to an actual path, using the actual data 884 identifier. This implementation is usually sufficient but can be 885 overridden by the subclass. 886 @param template (string) Template path 887 @param actualId (dict) Dataset identifier 888 @return (string) Pathname""" 892 return template % transformedId
893 except Exception
as e:
894 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
898 """Convert a CCD name to a form useful as a filename 900 The default implementation converts spaces to underscores. 902 return ccdName.replace(
" ",
"_")
    def _extractDetectorName(self, dataId):
        """Extract the detector (CCD) name from the dataset identifier.

        The name in question is the detector name used by lsst.afw.cameraGeom.

        @param dataId (dict) Dataset identifier
        @return (string) Detector name
        """
        raise NotImplementedError("No _extractDetectorName() function specified")
    def _extractAmpId(self, dataId):
        """Extract the amplifier identifier from a dataset identifier.

        @warning this is deprecated; DO NOT USE IT

        amplifier identifier has two parts: the detector name for the CCD
        containing the amplifier and index of the amplifier in the detector.
        @param dataId (dict) Dataset identifier
        @return (tuple) Amplifier identifier"""

        trDataId = self._transformId(dataId)
        return (trDataId["ccd"], int(trDataId['amp']))
    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.
        Defects are also added to the Exposure based on the detector object.
        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
        self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.
        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)"""
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)
    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure. If the Exposure had a FILTER
        keyword, this was already processed during load. But if it didn't,
        use the filter from the registry.
        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier"""

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
                isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        item.setFilter(afwImage.Filter(filterName))
    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry
        and optionally sets the Filter. In both cases this saves
        having to persist some data in each exposure (or image).

        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
                       lsst.afw.image.DecoratedImage, lsst.afw.image.Image
                       or lsst.afw.image.MaskedImage
        @param dataId (dict) Dataset identifier
        @param filter (bool) Set filter? Ignored if item is already an exposure
        @param trimmed (bool) Should detector be marked as trimmed?
        @param setVisitInfo (bool) Should Exposure have its VisitInfo filled
                            out from the metadata?
        @return (lsst.afw.image.Exposure) the standardized Exposure"""
        try:
            item = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                     setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(item, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(item, dataId, trimmed)

        if filter:
            self._setFilter(mapping, item, dataId)

        return item
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.
        @param dataId (dict) Dataset identifier
        @return (string) path to the defects file or None if not available"""
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Look up the defects for this CCD that are valid at the observation time
        rows = self.defectRegistry.executeQuery(("path",), ("defect",),
                                                [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry.

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for
                      backward compatibility))
                      Policy with per-camera defaults already merged
        @param repositoryDir (string) Policy repository for the subclassing
                      module (obtained with getRepositoryPath() on the
                      per-camera default dictionary)
        """
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(pexPolicy=policy)
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
        )
1070 """Get the registry used by this mapper. 1075 The registry used by this mapper for this mapper's repository. 1080 """Stuff image compression settings into a daf.base.PropertySet 1082 This goes into the ButlerLocation's "additionalData", which gets 1083 passed into the boost::persistence framework. 1088 Type of dataset for which to get the image compression settings. 1094 additionalData : `lsst.daf.base.PropertySet` 1095 Image compression settings. 1097 mapping = self.
mappings[datasetType]
1098 recipeName = mapping.recipe
1099 storageType = mapping.storage
1101 return dafBase.PropertySet()
1103 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1104 (datasetType, storageType, recipeName))
1105 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1106 seed = hash(tuple(dataId.items())) % 2**31
1107 for plane
in (
"image",
"mask",
"variance"):
1108 if recipe.exists(plane +
".scaling.seed")
and recipe.get(plane +
".scaling.seed") == 0:
1109 recipe.set(plane +
".scaling.seed", seed)
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types. A recipe is referred to by a symbolic name,
        which has associated settings. These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML. A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type. Currently, only the
        ``FitsStorage`` storage type uses recipes, which it uses to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries. Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage}
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Supplements may add recipes but may not override existing ones
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1188 """Generate an Exposure from an image-like object 1190 If the image is a DecoratedImage then also set its WCS and metadata 1191 (Image and MaskedImage are missing the necessary metadata 1192 and Exposure already has those set) 1194 @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure) 1195 @return (lsst.afw.image.Exposure) Exposure containing input image 1198 if isinstance(image, afwImage.MaskedImage):
1199 exposure = afwImage.makeExposure(image)
1200 elif isinstance(image, afwImage.DecoratedImage):
1201 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1202 metadata = image.getMetadata()
1204 wcs = afwImage.makeWcs(metadata,
True)
1205 exposure.setWcs(wcs)
1206 except pexExcept.InvalidParameterError
as e:
1209 logger = lsstLog.Log.getLogger(
"CameraMapper")
1210 logger.warn(
"wcs set to None; insufficient information found in metadata to create a valid wcs: " 1213 exposure.setMetadata(metadata)
1214 elif isinstance(image, afwImage.Exposure):
1217 metadata = exposure.getMetadata()
1220 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1224 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1225 if metadata
is not None:
1228 logger = lsstLog.Log.getLogger(
"CameraMapper")
1229 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1231 exposureId = mapper._computeCcdExposureId(dataId)
1232 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1234 exposure.getInfo().setVisitInfo(visitInfo)
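# Usage sketch (assumes a raw FITS file readable as a DecoratedImage; without
# a mapper the VisitInfo is left unset and a warning is logged):
#     exp = exposureFromImage(afwImage.DecoratedImageF("raw.fits"))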
1240 """Validate recipes for FitsStorage 1242 The recipes are supplemented with default values where appropriate. 1244 TODO: replace this custom validation code with Cerberus (DM-11846) 1248 recipes : `lsst.daf.persistence.Policy` 1249 FitsStorage recipes to validate. 1253 validated : `lsst.daf.base.PropertySet` 1254 Validated FitsStorage recipe. 1259 If validation fails. 1263 compressionSchema = {
1264 "algorithm":
"NONE",
1267 "quantizeLevel": 0.0,
1270 "algorithm":
"NONE",
1272 "maskPlanes": [
"NO_DATA"],
1274 "quantizeLevel": 4.0,
1281 def checkUnrecognized(entry, allowed, description):
1282 """Check to see if the entry contains unrecognised keywords""" 1283 unrecognized = set(entry.keys()) - set(allowed)
1286 "Unrecognized entries when parsing image compression recipe %s: %s" %
1287 (description, unrecognized))
1290 for name
in recipes.names(
True):
1291 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1292 rr = dafBase.PropertySet()
1293 validated[name] = rr
1294 for plane
in (
"image",
"mask",
"variance"):
1295 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1296 name +
"->" + plane)
1298 for settings, schema
in ((
"compression", compressionSchema),
1299 (
"scaling", scalingSchema)):
1300 prefix = plane +
"." + settings
1301 if settings
not in recipes[name][plane]:
1303 rr.set(prefix +
"." + key, schema[key])
1305 entry = recipes[name][plane][settings]
1306 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1308 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1309 rr.set(prefix +
"." + key, value)