from builtins import str

import copy
import os
import re
import weakref

# pyfits is still required to unpersist defects; see the TODO in the class docstring
import pyfits

import lsst.daf.persistence as dafPersist
from . import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
import lsst.daf.base as dafBase
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.table as afwTable
from lsst.afw.fits import readMetadata
import lsst.afw.cameraGeom as afwCameraGeom
import lsst.log as lsstLog
import lsst.pex.policy as pexPolicy
import lsst.pex.exceptions as pexExcept
from lsst.utils import getPackageDir
from .exposureIdInfo import ExposureIdInfo
from .makeRawVisitInfo import MakeRawVisitInfo
44 """This module defines the CameraMapper base class.""" 49 """CameraMapper is a base class for mappers that handle images from a 50 camera and products derived from them. This provides an abstraction layer 51 between the data on disk and the code. 53 Public methods: keys, queryMetadata, getDatasetTypes, map, 54 canStandardize, standardize 56 Mappers for specific data sources (e.g., CFHT Megacam, LSST 57 simulations, etc.) should inherit this class. 59 The CameraMapper manages datasets within a "root" directory. Note that 60 writing to a dataset present in the input root will hide the existing 61 dataset but not overwrite it. See #2160 for design discussion. 63 A camera is assumed to consist of one or more rafts, each composed of 64 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 65 (amps). A camera is also assumed to have a camera geometry description 66 (CameraGeom object) as a policy file, a filter description (Filter class 67 static configuration) as another policy file, and an optional defects 68 description directory. 70 Information from the camera geometry and defects are inserted into all 71 Exposure objects returned. 73 The mapper uses one or two registries to retrieve metadata about the 74 images. The first is a registry of all raw exposures. This must contain 75 the time of the observation. One or more tables (or the equivalent) 76 within the registry are used to look up data identifier components that 77 are not specified by the user (e.g. filter) and to return results for 78 metadata queries. The second is an optional registry of all calibration 79 data. This should contain validity start and end entries for each 80 calibration dataset in the same timescale as the observation time. 82 Subclasses will typically set MakeRawVisitInfoClass: 84 MakeRawVisitInfoClass: a class variable that points to a subclass of 85 MakeRawVisitInfo, a functor that creates an 86 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 88 Subclasses must provide the following methods: 90 _extractDetectorName(self, dataId): returns the detector name for a CCD 91 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 92 a dataset identifier referring to that CCD or a subcomponent of it. 94 _computeCcdExposureId(self, dataId): see below 96 _computeCoaddExposureId(self, dataId, singleFilter): see below 98 Subclasses may also need to override the following methods: 100 _transformId(self, dataId): transformation of a data identifier 101 from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"), 102 including making suitable for path expansion (e.g. removing commas). 103 The default implementation does nothing. Note that this 104 method should not modify its input parameter. 106 getShortCcdName(self, ccdName): a static method that returns a shortened name 107 suitable for use as a filename. The default version converts spaces to underscores. 109 _getCcdKeyVal(self, dataId): return a CCD key and value 110 by which to look up defects in the defects registry. 111 The default value returns ("ccd", detector name) 113 _mapActualToPath(self, template, actualId): convert a template path to an 114 actual path, using the actual dataset identifier. 116 The mapper's behaviors are largely specified by the policy file. 117 See the MapperDictionary.paf for descriptions of the available items. 119 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 120 mappings (see Mappings class). 
122 Common default mappings for all subclasses can be specified in the 123 "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides 124 a simple way to add a product to all camera mappers. 126 Functions to map (provide a path to the data given a dataset 127 identifier dictionary) and standardize (convert data into some standard 128 format or type) may be provided in the subclass as "map_{dataset type}" 129 and "std_{dataset type}", respectively. 131 If non-Exposure datasets cannot be retrieved using standard 132 daf_persistence methods alone, a "bypass_{dataset type}" function may be 133 provided in the subclass to return the dataset instead of using the 134 "datasets" subpolicy. 136 Implementations of map_camera and bypass_camera that should typically be 137 sufficient are provided in this base class. 140 * Handle defects the same was as all other calibration products, using the calibration registry 141 * Instead of auto-loading the camera at construction time, load it from the calibration registry 142 * Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention 143 of pyfits from this package. 149 MakeRawVisitInfoClass = MakeRawVisitInfo
152 PupilFactoryClass = afwCameraGeom.PupilFactory
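    # A minimal concrete subclass might look like the sketch below.  The
    # "MyCam" names are illustrative assumptions, not part of obs_base; a real
    # obs package also supplies the policy files described above.
    #
    #     class MyCamMapper(CameraMapper):
    #         packageName = "obs_mycam"
    #         MakeRawVisitInfoClass = MakeRawVisitInfo  # usually a camera-specific subclass
    #
    #         def _extractDetectorName(self, dataId):
    #             return "MyCam %(ccd)d" % dataId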
    def __init__(self, policy, repositoryDir,
                 root=None, registry=None, calibRoot=None, calibRegistry=None,
                 provided=None, parentRegistry=None, repositoryCfg=None):
        """Initialize the CameraMapper.

        Parameters
        ----------
        policy : daf_persistence.Policy,
            Policy with per-camera defaults already merged.  Can also be a
            pexPolicy.Policy, for backward compatibility only.
        repositoryDir : string
            Policy repository for the subclassing module (obtained with
            getRepositoryPath() on the per-camera default dictionary).
        root : string, optional
            Path to the root directory for data.
        registry : string, optional
            Path to registry with data's metadata.
        calibRoot : string, optional
            Root directory for calibrations.
        calibRegistry : string, optional
            Path to registry with calibrations' metadata.
        provided : list of string, optional
            Keys provided by the mapper.
        parentRegistry : Registry subclass, optional
            Registry from a parent repository that may be used to look up
            data's metadata.
        repositoryCfg : daf_persistence.RepositoryCfg or None, optional
            The configuration information for the repository this mapper is
            being used with.
        """

        dafPersist.Mapper.__init__(self)
        self.log = lsstLog.Log.getLogger("CameraMapper")

        if repositoryCfg is not None:
            self.root = repositoryCfg.root
        else:
            self.root = root

        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(policy)

        repoPolicy = repositoryCfg.policy if repositoryCfg else None
        if repoPolicy is not None:
            policy.update(repoPolicy)
        defaultPolicyFile = dafPersist.Policy.defaultPolicyFile("obs_base",
                                                                "MapperDictionary.paf",
                                                                "policy")
        dictPolicy = dafPersist.Policy(defaultPolicyFile)
        policy.merge(dictPolicy)
        # Levels
        self.levels = dict()
        if 'levels' in policy:
            levelsPolicy = policy['levels']
            for key in levelsPolicy.names(True):
                self.levels[key] = set(levelsPolicy.asArray(key))
        self.defaultLevel = policy['defaultLevel']
        self.defaultSubLevels = dict()
        if 'defaultSubLevels' in policy:
            self.defaultSubLevels = policy['defaultSubLevels']
        # Root directories
        root = dafPersist.LogicalLocation(root).locString()
        self.rootStorage = dafPersist.Storage.makeFromURI(uri=root)

        # Storage for calibrations, if a calibration root was given (or is in
        # the policy); otherwise fall back to the repository root storage.
        calibStorage = None
        if calibRoot is not None:
            calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
            calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                          create=False)
        else:
            calibRoot = policy.get('calibRoot', None)
            if calibRoot:
                calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
                                                              create=False)
        if calibStorage is None:
            calibStorage = self.rootStorage

        self.root = root

        # Registries
        self.registry = self._setupRegistry("registry", "exposure", registry, policy, "registryPath",
                                            self.rootStorage, searchParents=False,
                                            posixIfNoSql=(not parentRegistry))
        needCalibRegistry = policy.get('needCalibRegistry', None)
        if needCalibRegistry:
            if calibStorage:
                self.calibRegistry = self._setupRegistry("calibRegistry", "calib", calibRegistry, policy,
                                                         "calibRegistryPath", calibStorage,
                                                         posixIfNoSql=False)
            else:
                raise RuntimeError(
                    "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
                    "calibRoot ivar:%s or policy['calibRoot']:%s" %
                    (calibRoot, policy.get('calibRoot', None)))
        else:
            self.calibRegistry = None
        # Dict of valid keys and their value types
        self.keyDict = dict()

        self._initMappings(policy, self.rootStorage, calibStorage, provided=None)
        self._initWriteRecipes()

        # Camera geometry
        self.cameraDataLocation = None  # path to camera geometry config file
        self.camera = self._makeCamera(policy=policy, repositoryDir=repositoryDir)

        # Filter translation table
        self.filters = None

        # Sky policy, returned by std_skypolicy
        self.skypolicy = policy['skytiles'] if 'skytiles' in policy else None

        # Defect registry and root.  Defects are stored with the camera and
        # the registry is loaded from the camera package, which is on the
        # local filesystem.
        self.defectRegistry = None
        if 'defects' in policy:
            self.defectPath = os.path.join(repositoryDir, policy['defects'])
            defectRegistryLocation = os.path.join(self.defectPath, "defectRegistry.sqlite3")
            self.defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
        if self.packageName is None:
            raise ValueError('class variable packageName must not be None')

        self.makeRawVisitInfo = self.MakeRawVisitInfoClass(log=self.log)
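    # Construction is normally indirect: a Butler reads the repository's
    # _mapper (or repositoryCfg) to find the concrete CameraMapper subclass
    # and instantiates it.  A sketch (the repository path is hypothetical):
    #
    #     import lsst.daf.persistence as dafPersist
    #     butler = dafPersist.Butler(inputs="/path/to/repo")
    #     raw = butler.get("raw", visit=1, ccd=2)  # mapped and standardized by the mapper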
    def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
        """Initialize mappings

        For each of the dataset types that we want to be able to read, there
        are methods that can be created to support them:

        * map_<dataset> : determine the path for dataset
        * std_<dataset> : standardize the retrieved dataset
        * bypass_<dataset> : retrieve the dataset (bypassing the usual
          retrieval machinery)
        * query_<dataset> : query the registry

        Besides the dataset types explicitly listed in the policy, we create
        additional, derived datasets for additional conveniences, e.g., reading
        the header of an image, retrieving only the size of a catalog.

        @param policy       (Policy) Policy with per-camera defaults already merged
        @param rootStorage  (Storage subclass instance) Interface to persisted repository data
        @param calibStorage (Storage subclass instance) Interface to persisted calib repository data
        @param provided     (list of strings) Keys provided by the mapper
        """
        # Sub-dictionaries (for exposure/calibration/dataset types)
        imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ImageMappingDictionary.paf", "policy"))
        expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "ExposureMappingDictionary.paf", "policy"))
        calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "CalibrationMappingDictionary.paf", "policy"))
        dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
            "obs_base", "DatasetMappingDictionary.paf", "policy"))
        # Mappings
        mappingList = (
            ("images", imgMappingPolicy, ImageMapping),
            ("exposures", expMappingPolicy, ExposureMapping),
            ("calibrations", calMappingPolicy, CalibrationMapping),
            ("datasets", dsMappingPolicy, DatasetMapping),
        )
        self.mappings = dict()
        for name, defPolicy, cls in mappingList:
            if name not in policy:
                continue
            datasets = policy[name]

            # Centrally-defined datasets
            defaultsPath = os.path.join(getPackageDir("obs_base"), "policy", name + ".yaml")
            if os.path.exists(defaultsPath):
                datasets.merge(dafPersist.Policy(defaultsPath))

            mappings = dict()
            setattr(self, name, mappings)
            for datasetType in datasets.names(True):
                subPolicy = datasets[datasetType]
                subPolicy.merge(defPolicy)
                # Composite datasets are handled separately: the closure maps
                # the dataset to a ButlerComposite describing its components.
                if not hasattr(self, "map_" + datasetType) and 'composite' in subPolicy:
                    def compositeClosure(dataId, write=False, mapper=None, mapping=None,
                                         subPolicy=subPolicy):
                        components = subPolicy.get('composite')
                        assembler = subPolicy['assembler'] if 'assembler' in subPolicy else None
                        disassembler = subPolicy['disassembler'] if 'disassembler' in subPolicy else None
                        python = subPolicy['python']
                        butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
                                                                     disassembler=disassembler,
                                                                     python=python,
                                                                     dataId=dataId,
                                                                     mapper=self)
                        for name, component in components.items():
                            butlerComposite.add(id=name,
                                                datasetType=component.get('datasetType'),
                                                setter=component.get('setter', None),
                                                getter=component.get('getter', None),
                                                subset=component.get('subset', False),
                                                inputOnly=component.get('inputOnly', False))
                        return butlerComposite
                    setattr(self, "map_" + datasetType, compositeClosure)
                    # for now at least, don't set up any other handling for
                    # this dataset type
                    continue
                if name == "calibrations":
                    mapping = cls(datasetType, subPolicy, self.registry, self.calibRegistry, calibStorage,
                                  provided=provided, dataRoot=rootStorage)
                else:
                    mapping = cls(datasetType, subPolicy, self.registry, rootStorage, provided=provided)
                self.keyDict.update(mapping.keys())
                mappings[datasetType] = mapping
                self.mappings[datasetType] = mapping
                if not hasattr(self, "map_" + datasetType):
                    def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                        return mapping.map(mapper, dataId, write)
                    setattr(self, "map_" + datasetType, mapClosure)
                if not hasattr(self, "query_" + datasetType):
                    def queryClosure(format, dataId, mapping=mapping):
                        return mapping.lookup(format, dataId)
                    setattr(self, "query_" + datasetType, queryClosure)
                if hasattr(mapping, "standardize") and not hasattr(self, "std_" + datasetType):
                    def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
                        return mapping.standardize(mapper, item, dataId)
                    setattr(self, "std_" + datasetType, stdClosure)
                def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
                    """Set convenience methods on CameraMapper"""
                    mapName = "map_" + datasetType + "_" + suffix
                    bypassName = "bypass_" + datasetType + "_" + suffix
                    queryName = "query_" + datasetType + "_" + suffix
                    if not hasattr(self, mapName):
                        setattr(self, mapName, mapImpl or getattr(self, "map_" + datasetType))
                    if not hasattr(self, bypassName):
                        if bypassImpl is None and hasattr(self, "bypass_" + datasetType):
                            bypassImpl = getattr(self, "bypass_" + datasetType)
                        if bypassImpl is not None:
                            setattr(self, bypassName, bypassImpl)
                    if not hasattr(self, queryName):
                        setattr(self, queryName, queryImpl or getattr(self, "query_" + datasetType))
                # Filename of dataset
                setMethods("filename", bypassImpl=lambda datasetType, pythonType, location, dataId:
                           [os.path.join(location.getStorage().root, p) for p in location.getLocations()])
                # Metadata from FITS file
                if subPolicy["storage"] == "FitsStorage":  # a FITS image
                    setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               readMetadata(location.getLocationsWithRoot()[0]))
                # Support for configuring FITS compression
                addName = "add_" + datasetType
                if not hasattr(self, addName):
                    setattr(self, addName, self.getImageCompressionSettings)
                if name == "exposures":
                    setMethods("wcs", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
                    setMethods("calib", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
                    setMethods("visitInfo",
                               bypassImpl=lambda datasetType, pythonType, location, dataId:
                               afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
                    setMethods("filter",
                               bypassImpl=lambda datasetType, pythonType, location, dataId:
                               afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
                    setMethods("detector",
                               mapImpl=lambda dataId, write=False:
                               dafPersist.ButlerLocation(
                                   pythonType="lsst.afw.cameraGeom.CameraConfig",
                                   cppType="Config",
                                   storageName="Internal",
                                   locationList="ignored",
                                   dataId=dataId,
                                   mapper=self,
                                   storage=None,
                               ),
                               bypassImpl=lambda datasetType, pythonType, location, dataId:
                               self.camera[self._extractDetectorName(dataId)]
                               )
                    setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                               afwImage.bboxFromMetadata(
                                   readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
                elif name == "images":
                    setMethods("bbox", bypassImpl=lambda dsType, pyType, location, dataId:
                               afwImage.bboxFromMetadata(
                                   readMetadata(location.getLocationsWithRoot()[0])))
                if subPolicy["storage"] == "FitsCatalogStorage":  # a FITS catalog
                    setMethods("md", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               readMetadata(os.path.join(location.getStorage().root,
                                                         location.getLocations()[0]), hdu=1))
                # Sub-images
                if subPolicy["storage"] == "FitsStorage":
                    def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
                        subId = dataId.copy()
                        del subId['bbox']
                        loc = mapping.map(mapper, subId, write)
                        bbox = dataId['bbox']
                        llcX = bbox.getMinX()
                        llcY = bbox.getMinY()
                        width = bbox.getWidth()
                        height = bbox.getHeight()
                        loc.additionalData.set('llcX', llcX)
                        loc.additionalData.set('llcY', llcY)
                        loc.additionalData.set('width', width)
                        loc.additionalData.set('height', height)
                        if 'imageOrigin' in dataId:
                            loc.additionalData.set('imageOrigin',
                                                   dataId['imageOrigin'])
                        return loc

                    def querySubClosure(key, format, dataId, mapping=mapping):
                        subId = dataId.copy()
                        del subId[key]
                        return mapping.lookup(format, subId)
                    setMethods("sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
                if subPolicy["storage"] == "FitsCatalogStorage":
                    # Length of catalog
                    setMethods("len", bypassImpl=lambda datasetType, pythonType, location, dataId:
                               readMetadata(os.path.join(location.getStorage().root,
                                                         location.getLocations()[0]),
                                            hdu=1).get("NAXIS2"))

                    # Schema of catalog
                    if not datasetType.endswith("_schema") and datasetType + "_schema" not in datasets:
                        setMethods("schema", bypassImpl=lambda datasetType, pythonType, location, dataId:
                                   afwTable.Schema.readFits(os.path.join(location.getStorage().root,
                                                                         location.getLocations()[0])))
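    # The derived dataset types set up above are used through the Butler like
    # any other dataset type.  A sketch (the dataset names and data ID keys
    # are illustrative; they depend on the camera's policy):
    #
    #     md = butler.get("calexp_md", visit=1, ccd=2)    # header only
    #     wcs = butler.get("calexp_wcs", visit=1, ccd=2)  # WCS from the header
    #     nSrc = butler.get("src_len", visit=1, ccd=2)    # catalog length
    #     sub = butler.get("calexp_sub", visit=1, ccd=2, bbox=someBox)  # sub-image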
    def _computeCcdExposureId(self, dataId):
        """Compute the 64-bit (long) identifier for a CCD exposure.

        Subclasses must override

        @param dataId (dict) Data identifier with visit, ccd
        """
        raise NotImplementedError()

    def _computeCoaddExposureId(self, dataId, singleFilter):
        """Compute the 64-bit (long) identifier for a coadd.

        Subclasses must override

        @param dataId (dict) Data identifier with tract and patch.
        @param singleFilter (bool) True means the desired ID is for a
            single-filter coadd, in which case dataId must contain filter.
        """
        raise NotImplementedError()
    def _search(self, path):
        """Search for path in the associated repository's storage.

        Parameters
        ----------
        path : string
            Path that describes an object in the repository associated with
            this mapper.
            Path may contain an HDU indicator, e.g. 'foo.fits[1]'.  The
            indicator will be stripped when searching and so will match
            filenames without the HDU indicator, e.g. 'foo.fits'.  The path
            returned WILL contain the indicator though, e.g. ['foo.fits[1]'].

        Returns
        -------
        string
            The path for this object in the repository.  Will return None if
            the object can't be found.  If the input argument path contained
            an HDU indicator, the returned path will also contain the HDU
            indicator.
        """
        return self.rootStorage.search(path)

    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        The CameraMapper implementation saves objects in a sequence of e.g.:

        - foo.fits
        - foo.fits~1
        - foo.fits~2

        All of the backups will be placed in the output repo, however, and
        will not be removed if they are found elsewhere in the _parent chain.
        This means that the same file will be stored twice if the previous
        version was found in an input repo.
        """
        def firstElement(list):
            """Get the first element in the list, or None if that can't be
            done.
            """
            return list[0] if list is not None and len(list) else None

        n = 0
        newLocation = self.map(datasetType, dataId, write=True)
        newPath = newLocation.getLocations()[0]
        path = dafPersist.PosixStorage.search(self.root, newPath, searchParents=True)
        path = firstElement(path)
        oldPaths = []
        while path is not None:
            n += 1
            oldPaths.append((n, path))
            path = dafPersist.PosixStorage.search(self.root, "%s~%d" % (newPath, n), searchParents=True)
            path = firstElement(path)
        for n, oldPath in reversed(oldPaths):
            self.rootStorage.copyFile(oldPath, "%s~%d" % (newPath, n))
582 """Return supported keys. 583 @return (iterable) List of keys usable in a dataset identifier""" 587 """Return a dict of supported keys and their value types for a given dataset 588 type at a given level of the key hierarchy. 590 @param datasetType (str) dataset type or None for all dataset types 591 @param level (str) level or None for all levels or '' for the default level for the camera 592 @return (dict) dict keys are strings usable in a dataset identifier; values are their value types""" 598 if datasetType
is None:
599 keyDict = copy.copy(self.
keyDict)
602 if level
is not None and level
in self.
levels:
603 keyDict = copy.copy(keyDict)
604 for l
in self.
levels[level]:
619 """Return the name of the camera that this CameraMapper is for.""" 621 className = className[className.find(
'.'):-1]
622 m = re.search(
r'(\w+)Mapper', className)
624 m = re.search(
r"class '[\w.]*?(\w+)'", className)
626 return name[:1].lower() + name[1:]
if name
else '' 630 """Return the name of the package containing this CameraMapper.""" 632 raise ValueError(
'class variable packageName must not be None')
637 """Return the base directory of this package""" 641 """Map a camera dataset.""" 643 raise RuntimeError(
"No camera dataset available.")
645 return dafPersist.ButlerLocation(
646 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
648 storageName=
"ConfigStorage",
656 """Return the (preloaded) camera object. 659 raise RuntimeError(
"No camera dataset available.")
663 """Map defects dataset. 665 @return a very minimal ButlerLocation containing just the locationList field 666 (just enough information that bypass_defects can use it). 669 if defectFitsPath
is None:
670 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
672 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
677 """Return a defect based on the butler location returned by map_defects 679 @param[in] butlerLocation: a ButlerLocation with locationList = path to defects FITS file 680 @param[in] dataId: the usual data ID; "ccd" must be set 682 Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation 683 into an object, which is what we want for now, since that conversion is a bit tricky. 686 defectsFitsPath = butlerLocation.locationList[0]
687 with pyfits.open(defectsFitsPath)
as hduList:
688 for hdu
in hduList[1:]:
689 if hdu.header[
"name"] != detectorName:
693 for data
in hdu.data:
694 bbox = afwGeom.Box2I(
695 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
696 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
698 defectList.append(afwImage.DefectBase(bbox))
701 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
    def map_expIdInfo(self, dataId, write=False):
        """Map to a location from which an ExposureIdInfo is generated internally."""
        return dafPersist.ButlerLocation(
            pythonType="lsst.obs.base.ExposureIdInfo",
            cppType=None,
            storageName="Internal",
            locationList="ignored",
            dataId=dataId,
            mapper=self,
            storage=self.rootStorage
        )

    def bypass_expIdInfo(self, datasetType, pythonType, location, dataId):
        """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure"""
        expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
        expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
        return ExposureIdInfo(expId=expId, expBits=expBits)
721 """Disable standardization for bfKernel 723 bfKernel is a calibration product that is numpy array, 724 unlike other calibration products that are all images; 725 all calibration images are sent through _standardizeExposure 726 due to CalibrationMapping, but we don't want that to happen to bfKernel 731 """Standardize a raw dataset by converting it to an Exposure instead of an Image""" 733 trimmed=
False, setVisitInfo=
True)
736 """Map a sky policy.""" 737 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
738 "Internal",
None,
None, self,
742 """Standardize a sky policy by returning the one we use.""" 751 def _getCcdKeyVal(self, dataId):
752 """Return CCD key and value used to look a defect in the defect registry 754 The default implementation simply returns ("ccd", full detector name) 758 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
760 """Set up a registry (usually SQLite3), trying a number of possible 768 Description of registry (for log messages) 772 Policy that contains the registry name, used if path is None. 774 Key in policy for registry path. 775 storage : Storage subclass 776 Repository Storage to look in. 777 searchParents : bool, optional 778 True if the search for a registry should follow any Butler v1 780 posixIfNoSql : bool, optional 781 If an sqlite registry is not found, will create a posix registry if 786 lsst.daf.persistence.Registry 789 if path
is None and policyKey
in policy:
790 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
791 if os.path.isabs(path):
792 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
793 if not storage.exists(path):
794 newPath = storage.instanceSearch(path)
796 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 798 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
802 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
810 if path
and (path.startswith(root)):
811 path = path[len(root +
'/'):]
812 except AttributeError:
        def search(filename, description):
            """Search for file in storage

            Parameters
            ----------
            filename : `str`
                Filename to search for
            description : `str`
                Description of file, for error message.

            Returns
            -------
            path : `str` or `None`
                Path to file, or None
            """
            result = storage.instanceSearch(filename)
            if result:
                return result[0]
            self.log.debug("Unable to locate %s: %s", description, filename)
            return None

        # Search for a suitable registry database
        if path is None:
            path = search("%s.pgsql" % name, "%s in root" % description)
        if path is None:
            path = search("%s.sqlite3" % name, "%s in root" % description)
        if path is None:
            path = search(os.path.join(".", "%s.sqlite3" % name), "%s in current dir" % description)

        registry = None
        if path is not None:
            if not storage.exists(path):
                newPath = storage.instanceSearch(path)
                newPath = newPath[0] if newPath is not None and len(newPath) else None
                if newPath is not None:
                    path = newPath
            localFileObj = storage.getLocalFile(path)
            self.log.info("Loading %s registry from %s", description, localFileObj.name)
            registry = dafPersist.Registry.create(localFileObj.name)
            localFileObj.close()
        elif not registry and posixIfNoSql:
            self.log.info("Loading Posix %s registry from %s", description, storage.root)
            registry = dafPersist.PosixRegistry(storage.root)

        return registry
    def _transformId(self, dataId):
        """Generate a standard ID dict from a camera-specific ID dict.

        Canonical keys include:
        - amp: amplifier name
        - ccd: CCD name (in LSST this is a combination of raft and sensor)
        The default implementation returns a copy of its input.

        @param dataId[in] (dict) Dataset identifier; this must not be modified
        @return (dict) Transformed dataset identifier
        """
        return dataId.copy()

    def _mapActualToPath(self, template, actualId):
        """Convert a template path to an actual path, using the actual data
        identifier.  This implementation is usually sufficient but can be
        overridden by the subclass.

        @param template (string) Template path
        @param actualId (dict) Dataset identifier
        @return (string) Pathname
        """
        transformedId = self._transformId(actualId)
        try:
            return template % transformedId
        except Exception as e:
            raise RuntimeError("Failed to format %r with data %r: %s" % (template, transformedId, e))
895 """Convert a CCD name to a form useful as a filename 897 The default implementation converts spaces to underscores. 899 return ccdName.replace(
" ",
"_")
901 def _extractDetectorName(self, dataId):
902 """Extract the detector (CCD) name from the dataset identifier. 904 The name in question is the detector name used by lsst.afw.cameraGeom. 906 @param dataId (dict) Dataset identifier 907 @return (string) Detector name 909 raise NotImplementedError(
"No _extractDetectorName() function specified")
    def _extractAmpId(self, dataId):
        """Extract the amplifier identifier from a dataset identifier.

        @warning this is deprecated; DO NOT USE IT

        The amplifier identifier has two parts: the detector name for the CCD
        containing the amplifier and the index of the amplifier in the
        detector.

        @param dataId (dict) Dataset identifier
        @return (tuple) Amplifier identifier
        """
        trDataId = self._transformId(dataId)
        return (trDataId["ccd"], int(trDataId['amp']))

    def _setAmpDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for an amplifier.

        Defects are also added to the Exposure based on the detector object.

        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)
        """
        self._setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)

    def _setCcdDetector(self, item, dataId, trimmed=True):
        """Set the detector object in an Exposure for a CCD.

        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        @param trimmed (bool) Should detector be marked as trimmed? (ignored)
        """
        if item.getDetector() is not None:
            return

        detectorName = self._extractDetectorName(dataId)
        detector = self.camera[detectorName]
        item.setDetector(detector)
    def _setFilter(self, mapping, item, dataId):
        """Set the filter object in an Exposure.  If the Exposure had a FILTER
        keyword, this was already processed during load.  But if it didn't,
        use the filter from the registry.

        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item (lsst.afw.image.Exposure)
        @param dataId (dict) Dataset identifier
        """

        if not (isinstance(item, afwImage.ExposureU) or isinstance(item, afwImage.ExposureI) or
                isinstance(item, afwImage.ExposureF) or isinstance(item, afwImage.ExposureD)):
            return

        if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
            return

        actualId = mapping.need(['filter'], dataId)
        filterName = actualId['filter']
        if self.filters is not None and filterName in self.filters:
            filterName = self.filters[filterName]
        item.setFilter(afwImage.Filter(filterName))
    def _standardizeExposure(self, mapping, item, dataId, filter=True,
                             trimmed=True, setVisitInfo=True):
        """Default standardization function for images.

        This sets the Detector from the camera geometry and optionally sets
        the Filter.  In both cases this saves having to persist some data in
        each exposure (or image).

        @param mapping (lsst.obs.base.Mapping)
        @param[in,out] item image-like object; any of lsst.afw.image.Exposure,
            lsst.afw.image.DecoratedImage, lsst.afw.image.Image
            or lsst.afw.image.MaskedImage
        @param dataId (dict) Dataset identifier
        @param filter (bool) Set filter? Ignored if item is already an exposure
        @param trimmed (bool) Should detector be marked as trimmed?
        @param setVisitInfo (bool) Should Exposure have its VisitInfo filled
            out from the metadata?
        @return (lsst.afw.image.Exposure) the standardized Exposure
        """
        try:
            item = exposureFromImage(item, dataId, mapper=self, logger=self.log,
                                     setVisitInfo=setVisitInfo)
        except Exception as e:
            self.log.error("Could not turn item=%r into an exposure: %s" % (repr(item), e))
            raise

        if mapping.level.lower() == "amp":
            self._setAmpDetector(item, dataId, trimmed)
        elif mapping.level.lower() == "ccd":
            self._setCcdDetector(item, dataId, trimmed)

        if filter:
            self._setFilter(mapping, item, dataId)

        return item
    def _defectLookup(self, dataId):
        """Find the defects for a given CCD.

        @param dataId (dict) Dataset identifier
        @return (string) path to the defects file or None if not available
        """
        if self.defectRegistry is None:
            return None
        if self.registry is None:
            raise RuntimeError("No registry for defect lookup")

        ccdKey, ccdVal = self._getCcdKeyVal(dataId)

        dataIdForLookup = {'visit': dataId['visit']}
        # .lookup() starts with a db select which requires a list of tables
        # even if we're only searching one
        rows = self.registry.lookup(('taiObs'), ('raw_visit'), dataIdForLookup)
        if len(rows) == 0:
            return None
        assert len(rows) == 1
        taiObs = rows[0][0]

        # Lookup the defects for this CCD that are valid at the observation time
        rows = self.defectRegistry.executeQuery(("path",), ("defect",), [(ccdKey, "?")],
                                                ("DATETIME(?)", "DATETIME(validStart)", "DATETIME(validEnd)"),
                                                (ccdVal, taiObs))
        if not rows or len(rows) == 0:
            return None
        if len(rows) == 1:
            return os.path.join(self.defectPath, rows[0][0])
        else:
            raise RuntimeError("Querying for defects (%s, %s) returns %d files: %s" %
                               (ccdVal, taiObs, len(rows), ", ".join([_[0] for _ in rows])))
    def _makeCamera(self, policy, repositoryDir):
        """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing
        the camera geometry

        Also set self.cameraDataLocation, if relevant (else it can be left
        None).

        This implementation assumes that policy contains an entry "camera"
        that points to the subdirectory in this package of camera data;
        specifically, that subdirectory must contain:
        - a file named `camera.py` that contains persisted camera config
        - ampInfo table FITS files, as required by
          lsst.afw.cameraGeom.makeCameraFromPath

        @param policy (daf_persistence.Policy, or pexPolicy.Policy (only for
            backward compatibility)) Policy with per-camera defaults already
            merged
        @param repositoryDir (string) Policy repository for the subclassing
            module (obtained with getRepositoryPath() on the per-camera
            default dictionary)
        """
        if isinstance(policy, pexPolicy.Policy):
            policy = dafPersist.Policy(pexPolicy=policy)
        if 'camera' not in policy:
            raise RuntimeError("Cannot find 'camera' in policy; cannot construct a camera")
        cameraDataSubdir = policy['camera']
        self.cameraDataLocation = os.path.normpath(
            os.path.join(repositoryDir, cameraDataSubdir, "camera.py"))
        cameraConfig = afwCameraGeom.CameraConfig()
        cameraConfig.load(self.cameraDataLocation)
        ampInfoPath = os.path.dirname(self.cameraDataLocation)
        return afwCameraGeom.makeCameraFromPath(
            cameraConfig=cameraConfig,
            ampInfoPath=ampInfoPath,
            shortNameFunc=self.getShortCcdName,
            pupilFactoryClass=self.PupilFactoryClass
        )
mappings[datasetType]
1095 recipeName = mapping.recipe
1096 storageType = mapping.storage
1098 return dafBase.PropertySet()
1100 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1101 (datasetType, storageType, recipeName))
1102 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1103 seed = hash(tuple(dataId.items())) % 2**31
1104 for plane
in (
"image",
"mask",
"variance"):
1105 if recipe.exists(plane +
".scaling.seed")
and recipe.get(plane +
".scaling.seed") == 0:
1106 recipe.set(plane +
".scaling.seed", seed)
    def _initWriteRecipes(self):
        """Read the recipes for writing files

        These recipes are currently used for configuring FITS compression,
        but they could have wider uses for configuring different flavors
        of the storage types.  A recipe is referred to by a symbolic name,
        which has associated settings.  These settings are stored as a
        `PropertySet` so they can easily be passed down to the
        boost::persistence framework as the "additionalData" parameter.

        The list of recipes is written in YAML.  A default recipe and
        some other convenient recipes are in obs_base/policy/writeRecipes.yaml
        and these may be overridden or supplemented by the individual obs_*
        packages' own policy/writeRecipes.yaml files.

        Recipes are grouped by the storage type.  Currently, only the
        ``FitsStorage`` storage type uses recipes, which uses them to
        configure FITS image compression.

        Each ``FitsStorage`` recipe for FITS compression should define
        "image", "mask" and "variance" entries, each of which may contain
        "compression" and "scaling" entries.  Defaults will be provided for
        any missing elements under "compression" and "scaling".

        The allowed entries under "compression" are:

        * algorithm (string): compression algorithm to use
        * rows (int): number of rows per tile (0 = entire dimension)
        * columns (int): number of columns per tile (0 = entire dimension)
        * quantizeLevel (float): cfitsio quantization level

        The allowed entries under "scaling" are:

        * algorithm (string): scaling algorithm to use
        * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
        * fuzz (bool): fuzz the values when quantising floating-point values?
        * seed (long): seed for random number generator when fuzzing
        * maskPlanes (list of string): mask planes to ignore when doing
          statistics
        * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
        * quantizePad: number of stdev to allow on the low side (for
          STDEV_POSITIVE/NEGATIVE)
        * bscale: manually specified BSCALE (for MANUAL scaling)
        * bzero: manually specified BZERO (for MANUAL scaling)

        A very simple example YAML recipe:

            FitsStorage:
              default:
                image: &default
                  compression:
                    algorithm: GZIP_SHUFFLE
                mask: *default
                variance: *default
        """
        recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
        recipes = dafPersist.Policy(recipesFile)
        supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
        validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
        if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
            supplements = dafPersist.Policy(supplementsFile)
            # Don't allow a package to override the centrally-defined recipes,
            # only to supplement them.
            for entry in validationMenu:
                intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
                if intersection:
                    raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                       (supplementsFile, entry, recipesFile, intersection))
            recipes.update(supplements)

        self._writeRecipes = {}
        for storageType in recipes.names(True):
            if "default" not in recipes[storageType]:
                raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                                   (storageType, recipesFile))
            self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1185 """Generate an Exposure from an image-like object 1187 If the image is a DecoratedImage then also set its WCS and metadata 1188 (Image and MaskedImage are missing the necessary metadata 1189 and Exposure already has those set) 1191 @param[in] image Image-like object (lsst.afw.image.DecoratedImage, Image, MaskedImage or Exposure) 1192 @return (lsst.afw.image.Exposure) Exposure containing input image 1195 if isinstance(image, afwImage.MaskedImage):
1196 exposure = afwImage.makeExposure(image)
1197 elif isinstance(image, afwImage.DecoratedImage):
1198 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1199 metadata = image.getMetadata()
1201 wcs = afwGeom.makeSkyWcs(metadata, strip=
True)
1202 exposure.setWcs(wcs)
1203 except pexExcept.TypeError
as e:
1206 logger = lsstLog.Log.getLogger(
"CameraMapper")
1207 logger.warn(
"wcs set to None; insufficient information found in metadata to create a valid wcs: " 1210 exposure.setMetadata(metadata)
1211 elif isinstance(image, afwImage.Exposure):
1214 metadata = exposure.getMetadata()
1217 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1221 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1222 if metadata
is not None:
1225 logger = lsstLog.Log.getLogger(
"CameraMapper")
1226 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1228 exposureId = mapper._computeCcdExposureId(dataId)
1229 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1231 exposure.getInfo().setVisitInfo(visitInfo)
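# A minimal usage sketch (standalone; the FITS filename is hypothetical):
#
#     import lsst.afw.image as afwImage
#     di = afwImage.DecoratedImageF("raw.fits")   # image with FITS metadata
#     exposure = exposureFromImage(di)            # WCS/metadata set if present
#     mi = afwImage.MaskedImageF(100, 100)
#     exposure2 = exposureFromImage(mi)           # plain Exposure, no metadata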
1237 """Validate recipes for FitsStorage 1239 The recipes are supplemented with default values where appropriate. 1241 TODO: replace this custom validation code with Cerberus (DM-11846) 1245 recipes : `lsst.daf.persistence.Policy` 1246 FitsStorage recipes to validate. 1250 validated : `lsst.daf.base.PropertySet` 1251 Validated FitsStorage recipe. 1256 If validation fails. 1260 compressionSchema = {
1261 "algorithm":
"NONE",
1264 "quantizeLevel": 0.0,
1267 "algorithm":
"NONE",
1269 "maskPlanes": [
"NO_DATA"],
1271 "quantizeLevel": 4.0,
1278 def checkUnrecognized(entry, allowed, description):
1279 """Check to see if the entry contains unrecognised keywords""" 1280 unrecognized = set(entry.keys()) - set(allowed)
1283 "Unrecognized entries when parsing image compression recipe %s: %s" %
1284 (description, unrecognized))
1287 for name
in recipes.names(
True):
1288 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1289 rr = dafBase.PropertySet()
1290 validated[name] = rr
1291 for plane
in (
"image",
"mask",
"variance"):
1292 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1293 name +
"->" + plane)
1295 for settings, schema
in ((
"compression", compressionSchema),
1296 (
"scaling", scalingSchema)):
1297 prefix = plane +
"." + settings
1298 if settings
not in recipes[name][plane]:
1300 rr.set(prefix +
"." + key, schema[key])
1302 entry = recipes[name][plane][settings]
1303 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1305 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1306 rr.set(prefix +
"." + key, value)