28 import lsst.daf.persistence
as dafPersist
29 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base
as dafBase
31 import lsst.afw.geom
as afwGeom
32 import lsst.afw.image
as afwImage
33 import lsst.afw.table
as afwTable
34 from lsst.afw.fits
import readMetadata
35 import lsst.afw.cameraGeom
as afwCameraGeom
36 import lsst.log
as lsstLog
37 import lsst.pex.policy
as pexPolicy
39 from .exposureIdInfo
import ExposureIdInfo
40 from .makeRawVisitInfo
import MakeRawVisitInfo
43 __all__ = [
"CameraMapper",
"exposureFromImage"]
48 """CameraMapper is a base class for mappers that handle images from a 49 camera and products derived from them. This provides an abstraction layer 50 between the data on disk and the code. 52 Public methods: keys, queryMetadata, getDatasetTypes, map, 53 canStandardize, standardize 55 Mappers for specific data sources (e.g., CFHT Megacam, LSST 56 simulations, etc.) should inherit this class. 58 The CameraMapper manages datasets within a "root" directory. Note that 59 writing to a dataset present in the input root will hide the existing 60 dataset but not overwrite it. See #2160 for design discussion. 62 A camera is assumed to consist of one or more rafts, each composed of 63 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 64 (amps). A camera is also assumed to have a camera geometry description 65 (CameraGeom object) as a policy file, a filter description (Filter class 66 static configuration) as another policy file, and an optional defects 67 description directory. 69 Information from the camera geometry and defects are inserted into all 70 Exposure objects returned. 72 The mapper uses one or two registries to retrieve metadata about the 73 images. The first is a registry of all raw exposures. This must contain 74 the time of the observation. One or more tables (or the equivalent) 75 within the registry are used to look up data identifier components that 76 are not specified by the user (e.g. filter) and to return results for 77 metadata queries. The second is an optional registry of all calibration 78 data. This should contain validity start and end entries for each 79 calibration dataset in the same timescale as the observation time. 81 Subclasses will typically set MakeRawVisitInfoClass: 83 MakeRawVisitInfoClass: a class variable that points to a subclass of 84 MakeRawVisitInfo, a functor that creates an 85 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
87 Subclasses must provide the following methods: 89 _extractDetectorName(self, dataId): returns the detector name for a CCD 90 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 91 a dataset identifier referring to that CCD or a subcomponent of it. 93 _computeCcdExposureId(self, dataId): see below 95 _computeCoaddExposureId(self, dataId, singleFilter): see below 97 Subclasses may also need to override the following methods: 99 _transformId(self, dataId): transformation of a data identifier 100 from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"), 101 including making suitable for path expansion (e.g. removing commas). 102 The default implementation does nothing. Note that this 103 method should not modify its input parameter. 105 getShortCcdName(self, ccdName): a static method that returns a shortened name 106 suitable for use as a filename. The default version converts spaces to underscores. 108 _getCcdKeyVal(self, dataId): return a CCD key and value 109 by which to look up defects in the defects registry. 110 The default value returns ("ccd", detector name) 112 _mapActualToPath(self, template, actualId): convert a template path to an 113 actual path, using the actual dataset identifier. 115 The mapper's behaviors are largely specified by the policy file. 116 See the MapperDictionary.paf for descriptions of the available items. 118 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 119 mappings (see Mappings class). 121 Common default mappings for all subclasses can be specified in the 122 "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides 123 a simple way to add a product to all camera mappers. 125 Functions to map (provide a path to the data given a dataset 126 identifier dictionary) and standardize (convert data into some standard 127 format or type) may be provided in the subclass as "map_{dataset type}" 128 and "std_{dataset type}", respectively. 
130 If non-Exposure datasets cannot be retrieved using standard 131 daf_persistence methods alone, a "bypass_{dataset type}" function may be 132 provided in the subclass to return the dataset instead of using the 133 "datasets" subpolicy. 135 Implementations of map_camera and bypass_camera that should typically be 136 sufficient are provided in this base class. 142 - Handle defects the same was as all other calibration products, using the calibration registry 143 - Instead of auto-loading the camera at construction time, load it from the calibration registry 144 - Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention 145 of pyfits from this package. 151 MakeRawVisitInfoClass = MakeRawVisitInfo
154 PupilFactoryClass = afwCameraGeom.PupilFactory
156 def __init__(self, policy, repositoryDir,
157 root=None, registry=None, calibRoot=None, calibRegistry=None,
158 provided=None, parentRegistry=None, repositoryCfg=None):
159 """Initialize the CameraMapper. 163 policy : daf_persistence.Policy, 164 Can also be pexPolicy.Policy, only for backward compatibility. 165 Policy with per-camera defaults already merged. 166 repositoryDir : string 167 Policy repository for the subclassing module (obtained with 168 getRepositoryPath() on the per-camera default dictionary). 169 root : string, optional 170 Path to the root directory for data. 171 registry : string, optional 172 Path to registry with data's metadata. 173 calibRoot : string, optional 174 Root directory for calibrations. 175 calibRegistry : string, optional 176 Path to registry with calibrations' metadata. 177 provided : list of string, optional 178 Keys provided by the mapper. 179 parentRegistry : Registry subclass, optional 180 Registry from a parent repository that may be used to look up 182 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 183 The configuration information for the repository this mapper is 187 dafPersist.Mapper.__init__(self)
189 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
194 self.
root = repositoryCfg.root
197 if isinstance(policy, pexPolicy.Policy):
198 policy = dafPersist.Policy(policy)
200 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 201 if repoPolicy
is not None:
202 policy.update(repoPolicy)
204 defaultPolicyFile = dafPersist.Policy.defaultPolicyFile(
"obs_base",
205 "MapperDictionary.paf",
207 dictPolicy = dafPersist.Policy(defaultPolicyFile)
208 policy.merge(dictPolicy)
212 if 'levels' in policy:
213 levelsPolicy = policy[
'levels']
214 for key
in levelsPolicy.names(
True):
215 self.
levels[key] = set(levelsPolicy.asArray(key))
218 if 'defaultSubLevels' in policy:
224 root = dafPersist.LogicalLocation(root).locString()
234 if calibRoot
is not None:
235 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
236 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
239 calibRoot = policy.get(
'calibRoot',
None)
241 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
243 if calibStorage
is None:
251 posixIfNoSql=(
not parentRegistry))
254 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
255 if needCalibRegistry:
258 "calibRegistryPath", calibStorage,
262 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
263 "calibRoot ivar:%s or policy['calibRoot']:%s" %
264 (calibRoot, policy.get(
'calibRoot',
None)))
281 if 'defects' in policy:
282 self.
defectPath = os.path.join(repositoryDir, policy[
'defects'])
283 defectRegistryLocation = os.path.join(self.
defectPath,
"defectRegistry.sqlite3")
284 self.
defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
292 raise ValueError(
'class variable packageName must not be None')
296 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
297 """Initialize mappings 299 For each of the dataset types that we want to be able to read, there are 300 methods that can be created to support them: 301 * map_<dataset> : determine the path for dataset 302 * std_<dataset> : standardize the retrieved dataset 303 * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery) 304 * query_<dataset> : query the registry 306 Besides the dataset types explicitly listed in the policy, we create 307 additional, derived datasets for additional conveniences, e.g., reading 308 the header of an image, retrieving only the size of a catalog. 312 policy : `lsst.daf.persistence.Policy` 313 Policy with per-camera defaults already merged 314 rootStorage : `Storage subclass instance` 315 Interface to persisted repository data. 316 calibRoot : `Storage subclass instance` 317 Interface to persisted calib repository data 318 provided : `list` of `str` 319 Keys provided by the mapper 322 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
323 "obs_base",
"ImageMappingDictionary.paf",
"policy"))
324 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
325 "obs_base",
"ExposureMappingDictionary.paf",
"policy"))
326 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
327 "obs_base",
"CalibrationMappingDictionary.paf",
"policy"))
328 dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
329 "obs_base",
"DatasetMappingDictionary.paf",
"policy"))
333 (
"images", imgMappingPolicy, ImageMapping),
334 (
"exposures", expMappingPolicy, ExposureMapping),
335 (
"calibrations", calMappingPolicy, CalibrationMapping),
336 (
"datasets", dsMappingPolicy, DatasetMapping)
339 for name, defPolicy, cls
in mappingList:
341 datasets = policy[name]
344 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
345 if os.path.exists(defaultsPath):
346 datasets.merge(dafPersist.Policy(defaultsPath))
349 setattr(self, name, mappings)
350 for datasetType
in datasets.names(
True):
351 subPolicy = datasets[datasetType]
352 subPolicy.merge(defPolicy)
354 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
355 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
356 subPolicy=subPolicy):
357 components = subPolicy.get(
'composite')
358 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 359 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 360 python = subPolicy[
'python']
361 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
362 disassembler=disassembler,
366 for name, component
in components.items():
367 butlerComposite.add(id=name,
368 datasetType=component.get(
'datasetType'),
369 setter=component.get(
'setter',
None),
370 getter=component.get(
'getter',
None),
371 subset=component.get(
'subset',
False),
372 inputOnly=component.get(
'inputOnly',
False))
373 return butlerComposite
374 setattr(self,
"map_" + datasetType, compositeClosure)
378 if name ==
"calibrations":
380 provided=provided, dataRoot=rootStorage)
382 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
383 self.
keyDict.update(mapping.keys())
384 mappings[datasetType] = mapping
385 self.
mappings[datasetType] = mapping
386 if not hasattr(self,
"map_" + datasetType):
387 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
388 return mapping.map(mapper, dataId, write)
389 setattr(self,
"map_" + datasetType, mapClosure)
390 if not hasattr(self,
"query_" + datasetType):
391 def queryClosure(format, dataId, mapping=mapping):
392 return mapping.lookup(format, dataId)
393 setattr(self,
"query_" + datasetType, queryClosure)
394 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
395 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
396 return mapping.standardize(mapper, item, dataId)
397 setattr(self,
"std_" + datasetType, stdClosure)
399 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
400 """Set convenience methods on CameraMapper""" 401 mapName =
"map_" + datasetType +
"_" + suffix
402 bypassName =
"bypass_" + datasetType +
"_" + suffix
403 queryName =
"query_" + datasetType +
"_" + suffix
404 if not hasattr(self, mapName):
405 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
406 if not hasattr(self, bypassName):
407 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
408 bypassImpl = getattr(self,
"bypass_" + datasetType)
409 if bypassImpl
is not None:
410 setattr(self, bypassName, bypassImpl)
411 if not hasattr(self, queryName):
412 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
415 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
416 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
418 if subPolicy[
"storage"] ==
"FitsStorage":
419 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
420 readMetadata(location.getLocationsWithRoot()[0]))
423 addName =
"add_" + datasetType
424 if not hasattr(self, addName):
427 if name ==
"exposures":
428 setMethods(
"wcs", bypassImpl=
lambda datasetType, pythonType, location, dataId:
429 afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
430 setMethods(
"calib", bypassImpl=
lambda datasetType, pythonType, location, dataId:
431 afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
432 setMethods(
"visitInfo",
433 bypassImpl=
lambda datasetType, pythonType, location, dataId:
434 afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
436 bypassImpl=
lambda datasetType, pythonType, location, dataId:
437 afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
438 setMethods(
"detector",
439 mapImpl=
lambda dataId, write=
False:
440 dafPersist.ButlerLocation(
441 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
443 storageName=
"Internal",
444 locationList=
"ignored",
449 bypassImpl=
lambda datasetType, pythonType, location, dataId:
452 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
453 afwImage.bboxFromMetadata(
454 readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
456 elif name ==
"images":
457 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
458 afwImage.bboxFromMetadata(
459 readMetadata(location.getLocationsWithRoot()[0])))
461 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
462 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
463 readMetadata(os.path.join(location.getStorage().root,
464 location.getLocations()[0]), hdu=1))
467 if subPolicy[
"storage"] ==
"FitsStorage":
468 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
469 subId = dataId.copy()
471 loc = mapping.map(mapper, subId, write)
472 bbox = dataId[
'bbox']
473 llcX = bbox.getMinX()
474 llcY = bbox.getMinY()
475 width = bbox.getWidth()
476 height = bbox.getHeight()
477 loc.additionalData.set(
'llcX', llcX)
478 loc.additionalData.set(
'llcY', llcY)
479 loc.additionalData.set(
'width', width)
480 loc.additionalData.set(
'height', height)
481 if 'imageOrigin' in dataId:
482 loc.additionalData.set(
'imageOrigin',
483 dataId[
'imageOrigin'])
486 def querySubClosure(key, format, dataId, mapping=mapping):
487 subId = dataId.copy()
489 return mapping.lookup(format, subId)
490 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
492 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
494 setMethods(
"len", bypassImpl=
lambda datasetType, pythonType, location, dataId:
495 readMetadata(os.path.join(location.getStorage().root,
496 location.getLocations()[0]),
497 hdu=1).getScalar(
"NAXIS2"))
500 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
501 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
502 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
503 location.getLocations()[0])))
505 def _computeCcdExposureId(self, dataId):
506 """Compute the 64-bit (long) identifier for a CCD exposure. 508 Subclasses must override 513 Data identifier with visit, ccd. 515 raise NotImplementedError()
517 def _computeCoaddExposureId(self, dataId, singleFilter):
518 """Compute the 64-bit (long) identifier for a coadd. 520 Subclasses must override 525 Data identifier with tract and patch. 526 singleFilter : `bool` 527 True means the desired ID is for a single-filter coadd, in which 528 case dataIdmust contain filter. 530 raise NotImplementedError()
532 def _search(self, path):
533 """Search for path in the associated repository's storage. 538 Path that describes an object in the repository associated with 540 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 541 indicator will be stripped when searching and so will match 542 filenames without the HDU indicator, e.g. 'foo.fits'. The path 543 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 548 The path for this object in the repository. Will return None if the 549 object can't be found. If the input argument path contained an HDU 550 indicator, the returned path will also contain the HDU indicator. 555 """Rename any existing object with the given type and dataId. 557 The CameraMapper implementation saves objects in a sequence of e.g.: 563 All of the backups will be placed in the output repo, however, and will 564 not be removed if they are found elsewhere in the _parent chain. This 565 means that the same file will be stored twice if the previous version was 566 found in an input repo. 575 def firstElement(list):
576 """Get the first element in the list, or None if that can't be done. 578 return list[0]
if list
is not None and len(list)
else None 581 newLocation = self.map(datasetType, dataId, write=
True)
582 newPath = newLocation.getLocations()[0]
583 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
584 path = firstElement(path)
586 while path
is not None:
588 oldPaths.append((n, path))
589 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
590 path = firstElement(path)
591 for n, oldPath
in reversed(oldPaths):
592 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
595 """Return supported keys. 600 List of keys usable in a dataset identifier 605 """Return a dict of supported keys and their value types for a given dataset 606 type at a given level of the key hierarchy. 611 Dataset type or None for all dataset types. 612 level : `str` or None 613 Level or None for all levels or '' for the default level for the 619 Keys are strings usable in a dataset identifier, values are their 627 if datasetType
is None:
628 keyDict = copy.copy(self.
keyDict)
631 if level
is not None and level
in self.
levels:
632 keyDict = copy.copy(keyDict)
633 for l
in self.
levels[level]:
648 """Return the name of the camera that this CameraMapper is for.""" 650 className = className[className.find(
'.'):-1]
651 m = re.search(
r'(\w+)Mapper', className)
653 m = re.search(
r"class '[\w.]*?(\w+)'", className)
655 return name[:1].lower() + name[1:]
if name
else '' 659 """Return the name of the package containing this CameraMapper.""" 661 raise ValueError(
'class variable packageName must not be None')
666 """Return the base directory of this package""" 670 """Map a camera dataset.""" 672 raise RuntimeError(
"No camera dataset available.")
674 return dafPersist.ButlerLocation(
675 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
677 storageName=
"ConfigStorage",
685 """Return the (preloaded) camera object. 688 raise RuntimeError(
"No camera dataset available.")
692 """Map defects dataset. 696 `lsst.daf.butler.ButlerLocation` 697 Minimal ButlerLocation containing just the locationList field 698 (just enough information that bypass_defects can use it). 701 if defectFitsPath
is None:
702 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
704 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
709 """Return a defect based on the butler location returned by map_defects 713 butlerLocation : `lsst.daf.persistence.ButlerLocation` 714 locationList = path to defects FITS file 716 Butler data ID; "ccd" must be set. 718 Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation 719 into an object, which is what we want for now, since that conversion is a bit tricky. 722 defectsFitsPath = butlerLocation.locationList[0]
723 with pyfits.open(defectsFitsPath)
as hduList:
724 for hdu
in hduList[1:]:
725 if hdu.header[
"name"] != detectorName:
729 for data
in hdu.data:
730 bbox = afwGeom.Box2I(
731 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
732 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
734 defectList.append(afwImage.DefectBase(bbox))
737 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
740 return dafPersist.ButlerLocation(
741 pythonType=
"lsst.obs.base.ExposureIdInfo",
743 storageName=
"Internal",
744 locationList=
"ignored",
751 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 752 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
753 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
757 """Disable standardization for bfKernel 759 bfKernel is a calibration product that is numpy array, 760 unlike other calibration products that are all images; 761 all calibration images are sent through _standardizeExposure 762 due to CalibrationMapping, but we don't want that to happen to bfKernel 767 """Standardize a raw dataset by converting it to an Exposure instead of an Image""" 769 trimmed=
False, setVisitInfo=
True)
772 """Map a sky policy.""" 773 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
774 "Internal",
None,
None, self,
778 """Standardize a sky policy by returning the one we use.""" 779 return self.skypolicy
787 def _getCcdKeyVal(self, dataId):
788 """Return CCD key and value used to look a defect in the defect registry 790 The default implementation simply returns ("ccd", full detector name) 794 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
796 """Set up a registry (usually SQLite3), trying a number of possible 804 Description of registry (for log messages) 808 Policy that contains the registry name, used if path is None. 810 Key in policy for registry path. 811 storage : Storage subclass 812 Repository Storage to look in. 813 searchParents : bool, optional 814 True if the search for a registry should follow any Butler v1 816 posixIfNoSql : bool, optional 817 If an sqlite registry is not found, will create a posix registry if 822 lsst.daf.persistence.Registry 825 if path
is None and policyKey
in policy:
826 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
827 if os.path.isabs(path):
828 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
829 if not storage.exists(path):
830 newPath = storage.instanceSearch(path)
832 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 834 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
838 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
846 if path
and (path.startswith(root)):
847 path = path[len(root +
'/'):]
848 except AttributeError:
854 def search(filename, description):
855 """Search for file in storage 860 Filename to search for 862 Description of file, for error message. 866 path : `str` or `None` 867 Path to file, or None 869 result = storage.instanceSearch(filename)
872 self.
log.debug(
"Unable to locate %s: %s", description, filename)
877 path = search(
"%s.pgsql" % name,
"%s in root" % description)
879 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
881 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
884 if not storage.exists(path):
885 newPath = storage.instanceSearch(path)
886 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 887 if newPath
is not None:
889 localFileObj = storage.getLocalFile(path)
890 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
891 registry = dafPersist.Registry.create(localFileObj.name)
893 elif not registry
and posixIfNoSql:
895 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
896 registry = dafPersist.PosixRegistry(storage.root)
902 def _transformId(self, dataId):
903 """Generate a standard ID dict from a camera-specific ID dict. 905 Canonical keys include: 906 - amp: amplifier name 907 - ccd: CCD name (in LSST this is a combination of raft and sensor) 908 The default implementation returns a copy of its input. 913 Dataset identifier; this must not be modified 918 Transformed dataset identifier. 923 def _mapActualToPath(self, template, actualId):
924 """Convert a template path to an actual path, using the actual data 925 identifier. This implementation is usually sufficient but can be 926 overridden by the subclass. 943 return template % transformedId
944 except Exception
as e:
945 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
949 """Convert a CCD name to a form useful as a filename 951 The default implementation converts spaces to underscores. 953 return ccdName.replace(
" ",
"_")
955 def _extractDetectorName(self, dataId):
956 """Extract the detector (CCD) name from the dataset identifier. 958 The name in question is the detector name used by lsst.afw.cameraGeom. 970 raise NotImplementedError(
"No _extractDetectorName() function specified")
972 def _extractAmpId(self, dataId):
973 """Extract the amplifier identifer from a dataset identifier. 975 .. note:: Deprecated in 11_0 977 amplifier identifier has two parts: the detector name for the CCD 978 containing the amplifier and index of the amplifier in the detector. 992 return (trDataId[
"ccd"], int(trDataId[
'amp']))
994 def _setAmpDetector(self, item, dataId, trimmed=True):
995 """Set the detector object in an Exposure for an amplifier. 997 Defects are also added to the Exposure based on the detector object. 1001 item : `lsst.afw.image.Exposure` 1002 Exposure to set the detector in. 1006 Should detector be marked as trimmed? (ignored) 1009 return self.
_setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1011 def _setCcdDetector(self, item, dataId, trimmed=True):
1012 """Set the detector object in an Exposure for a CCD. 1016 item : `lsst.afw.image.Exposure` 1017 Exposure to set the detector in. 1021 Should detector be marked as trimmed? (ignored) 1023 if item.getDetector()
is not None:
1027 detector = self.
camera[detectorName]
1028 item.setDetector(detector)
1030 def _setFilter(self, mapping, item, dataId):
1031 """Set the filter object in an Exposure. If the Exposure had a FILTER 1032 keyword, this was already processed during load. But if it didn't, 1033 use the filter from the registry. 1037 mapping : `lsst.obs.base.Mapping` 1038 Where to get the filter from. 1039 item : `lsst.afw.image.Exposure` 1040 Exposure to set the filter in. 1045 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1046 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1049 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1052 actualId = mapping.need([
'filter'], dataId)
1053 filterName = actualId[
'filter']
1055 filterName = self.
filters[filterName]
1056 item.setFilter(afwImage.Filter(filterName))
1059 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1060 trimmed=True, setVisitInfo=True):
1061 """Default standardization function for images. 1063 This sets the Detector from the camera geometry 1064 and optionally set the Fiter. In both cases this saves 1065 having to persist some data in each exposure (or image). 1069 mapping : `lsst.obs.base.Mapping` 1070 Where to get the values from. 1071 item : image-like object 1072 Can be any of lsst.afw.image.Exposure, 1073 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1074 or lsst.afw.image.MaskedImage 1079 Set filter? Ignored if item is already an exposure 1081 Should detector be marked as trimmed? 1082 setVisitInfo : `bool` 1083 Should Exposure have its VisitInfo filled out from the metadata? 1087 `lsst.afw.image.Exposure` 1088 The standardized Exposure. 1091 item =
exposureFromImage(item, dataId, mapper=self, logger=self.
log, setVisitInfo=setVisitInfo)
1092 except Exception
as e:
1093 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1096 if mapping.level.lower() ==
"amp":
1098 elif mapping.level.lower() ==
"ccd":
1106 def _defectLookup(self, dataId):
1107 """Find the defects for a given CCD. 1117 Path to the defects file or None if not available. 1122 raise RuntimeError(
"No registry for defect lookup")
1126 dataIdForLookup = {
'visit': dataId[
'visit']}
1128 rows = self.
registry.lookup((
'taiObs'), (
'raw_visit'), dataIdForLookup)
1131 assert len(rows) == 1
1137 (
"DATETIME(?)",
"DATETIME(validStart)",
"DATETIME(validEnd)"),
1139 if not rows
or len(rows) == 0:
1142 return os.path.join(self.
defectPath, rows[0][0])
1144 raise RuntimeError(
"Querying for defects (%s, %s) returns %d files: %s" %
1145 (ccdVal, taiObs, len(rows),
", ".join([_[0]
for _
in rows])))
1147 def _makeCamera(self, policy, repositoryDir):
1148 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry 1150 Also set self.cameraDataLocation, if relevant (else it can be left None). 1152 This implementation assumes that policy contains an entry "camera" that points to the 1153 subdirectory in this package of camera data; specifically, that subdirectory must contain: 1154 - a file named `camera.py` that contains persisted camera config 1155 - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath 1159 policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy` 1160 Policy with per-camera defaults already merged 1161 (PexPolicy only for backward compatibility). 1162 repositoryDir : `str` 1163 Policy repository for the subclassing module (obtained with 1164 getRepositoryPath() on the per-camera default dictionary). 1166 if isinstance(policy, pexPolicy.Policy):
1167 policy = dafPersist.Policy(pexPolicy=policy)
1168 if 'camera' not in policy:
1169 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1170 cameraDataSubdir = policy[
'camera']
1172 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1173 cameraConfig = afwCameraGeom.CameraConfig()
1176 return afwCameraGeom.makeCameraFromPath(
1177 cameraConfig=cameraConfig,
1178 ampInfoPath=ampInfoPath,
def getImageCompressionSettings(self, datasetType, dataId):
    """Stuff image compression settings into a daf.base.PropertySet

    This goes into the ButlerLocation's "additionalData", which gets
    passed into the boost::persistence framework.

    Parameters
    ----------
    datasetType : `str`
        Type of dataset for which to get the image compression settings.
    dataId : `dict`
        Dataset identifier.

    Returns
    -------
    additionalData : `lsst.daf.base.PropertySet`
        Image compression settings.
    """
    mapping = self.mappings[datasetType]
    recipeName = mapping.recipe
    storageType = mapping.storage
    if storageType not in self._writeRecipes:
        # No recipes defined for this storage type: nothing to configure.
        return dafBase.PropertySet()
    if recipeName not in self._writeRecipes[storageType]:
        raise RuntimeError("Unrecognized write recipe for datasetType %s (storage type %s): %s" %
                           (datasetType, storageType, recipeName))
    # Deep-copy so the cached recipe is never mutated by the seed injection below.
    recipe = self._writeRecipes[storageType][recipeName].deepCopy()
    # Deterministic per-dataId seed (31-bit, non-negative) for quantization fuzzing.
    seed = hash(tuple(dataId.items())) % 2**31
    for plane in ("image", "mask", "variance"):
        # A configured seed of 0 means "derive from the dataId".
        if recipe.exists(plane + ".scaling.seed") and recipe.getScalar(plane + ".scaling.seed") == 0:
            recipe.set(plane + ".scaling.seed", seed)
    return recipe
def _initWriteRecipes(self):
    """Read the recipes for writing files

    These recipes are currently used for configuring FITS compression,
    but they could have wider uses for configuring different flavors
    of the storage types. A recipe is referred to by a symbolic name,
    which has associated settings. These settings are stored as a
    `PropertySet` so they can easily be passed down to the
    boost::persistence framework as the "additionalData" parameter.

    The list of recipes is written in YAML. A default recipe and
    some other convenient recipes are in obs_base/policy/writeRecipes.yaml
    and these may be overridden or supplemented by the individual obs_*
    packages' own policy/writeRecipes.yaml files.

    Recipes are grouped by the storage type. Currently, only the
    ``FitsStorage`` storage type uses recipes, which uses it to
    configure FITS image compression.

    Each ``FitsStorage`` recipe for FITS compression should define
    "image", "mask" and "variance" entries, each of which may contain
    "compression" and "scaling" entries. Defaults will be provided for
    any missing elements under "compression" and "scaling".

    The allowed entries under "compression" are:

    * algorithm (string): compression algorithm to use
    * rows (int): number of rows per tile (0 = entire dimension)
    * columns (int): number of columns per tile (0 = entire dimension)
    * quantizeLevel (float): cfitsio quantization level

    The allowed entries under "scaling" are:

    * algorithm (string): scaling algorithm to use
    * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64)
    * fuzz (bool): fuzz the values when quantising floating-point values?
    * seed (long): seed for random number generator when fuzzing
    * maskPlanes (list of string): mask planes to ignore when doing statistics
    * quantizeLevel: divisor of the standard deviation for STDEV_* scaling
    * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE)
    * bscale: manually specified BSCALE (for MANUAL scaling)
    * bzero: manually specified BZERO (for MANUAL scaling)

    A very simple example YAML recipe:

        FitsStorage:
          default:
            image: &default
              compression:
                algorithm: GZIP_SHUFFLE
            mask: *default
            variance: *default
    """
    # Base recipes shipped with obs_base; per-camera packages may supplement them.
    recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
    recipes = dafPersist.Policy(recipesFile)
    supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
    # Maps a storage type to the validator that checks and normalizes its recipes.
    validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
    if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
        supplements = dafPersist.Policy(supplementsFile)
        # Supplements may only ADD recipes; overriding a base recipe is an error.
        for entry in validationMenu:
            intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
            if intersection:
                raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                   (supplementsFile, entry, recipesFile, intersection))
        recipes.update(supplements)

    self._writeRecipes = {}
    for storageType in recipes.names(True):
        # Every storage type must supply a "default" recipe.
        if "default" not in recipes[storageType]:
            raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                               (storageType, recipesFile))
        self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            # strip=True removes the WCS keywords from the metadata as they are consumed.
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # Raised on failure to create a WCS; leave the exposure without one.
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.debug("wcs set to None; insufficient information found in metadata to create a valid wcs:"
                         " %s", e.args[0])
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Already an Exposure; just pick up its metadata for the VisitInfo step.
        exposure = image
        metadata = exposure.getMetadata()
    else:
        # Plain Image: wrap in a MaskedImage, then an Exposure.
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))

    # Attach a VisitInfo when requested and one is not already present.
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `lsst.daf.base.PropertySet`
        Validated FitsStorage recipe.

    Raises
    ------
    RuntimeError
        If validation fails.
    """
    # The schemas list the allowed keys and their defaults; the type of each
    # default is also the type to which a supplied value is coerced.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "maxSubregionSize": 10000,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    validated = {}
    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)
            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent entirely: install every default.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema type; fall back to the default.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def _defectLookup(self, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)
def _getCcdKeyVal(self, dataId)
Utility functions.