23 from builtins
import str
29 import lsst.daf.persistence
as dafPersist
30 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.daf.base
as dafBase
32 import lsst.afw.geom
as afwGeom
33 import lsst.afw.image
as afwImage
34 import lsst.afw.table
as afwTable
35 from lsst.afw.fits
import readMetadata
36 import lsst.afw.cameraGeom
as afwCameraGeom
37 import lsst.log
as lsstLog
38 import lsst.pex.policy
as pexPolicy
40 from .exposureIdInfo
import ExposureIdInfo
41 from .makeRawVisitInfo
import MakeRawVisitInfo
# Public API of this module.
__all__ = [
    "CameraMapper",
    "exposureFromImage",
]
49 """CameraMapper is a base class for mappers that handle images from a 50 camera and products derived from them. This provides an abstraction layer 51 between the data on disk and the code. 53 Public methods: keys, queryMetadata, getDatasetTypes, map, 54 canStandardize, standardize 56 Mappers for specific data sources (e.g., CFHT Megacam, LSST 57 simulations, etc.) should inherit this class. 59 The CameraMapper manages datasets within a "root" directory. Note that 60 writing to a dataset present in the input root will hide the existing 61 dataset but not overwrite it. See #2160 for design discussion. 63 A camera is assumed to consist of one or more rafts, each composed of 64 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 65 (amps). A camera is also assumed to have a camera geometry description 66 (CameraGeom object) as a policy file, a filter description (Filter class 67 static configuration) as another policy file, and an optional defects 68 description directory. 70 Information from the camera geometry and defects are inserted into all 71 Exposure objects returned. 73 The mapper uses one or two registries to retrieve metadata about the 74 images. The first is a registry of all raw exposures. This must contain 75 the time of the observation. One or more tables (or the equivalent) 76 within the registry are used to look up data identifier components that 77 are not specified by the user (e.g. filter) and to return results for 78 metadata queries. The second is an optional registry of all calibration 79 data. This should contain validity start and end entries for each 80 calibration dataset in the same timescale as the observation time. 82 Subclasses will typically set MakeRawVisitInfoClass: 84 MakeRawVisitInfoClass: a class variable that points to a subclass of 85 MakeRawVisitInfo, a functor that creates an 86 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
88 Subclasses must provide the following methods: 90 _extractDetectorName(self, dataId): returns the detector name for a CCD 91 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 92 a dataset identifier referring to that CCD or a subcomponent of it. 94 _computeCcdExposureId(self, dataId): see below 96 _computeCoaddExposureId(self, dataId, singleFilter): see below 98 Subclasses may also need to override the following methods: 100 _transformId(self, dataId): transformation of a data identifier 101 from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"), 102 including making suitable for path expansion (e.g. removing commas). 103 The default implementation does nothing. Note that this 104 method should not modify its input parameter. 106 getShortCcdName(self, ccdName): a static method that returns a shortened name 107 suitable for use as a filename. The default version converts spaces to underscores. 109 _getCcdKeyVal(self, dataId): return a CCD key and value 110 by which to look up defects in the defects registry. 111 The default value returns ("ccd", detector name) 113 _mapActualToPath(self, template, actualId): convert a template path to an 114 actual path, using the actual dataset identifier. 116 The mapper's behaviors are largely specified by the policy file. 117 See the MapperDictionary.paf for descriptions of the available items. 119 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 120 mappings (see Mappings class). 122 Common default mappings for all subclasses can be specified in the 123 "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides 124 a simple way to add a product to all camera mappers. 126 Functions to map (provide a path to the data given a dataset 127 identifier dictionary) and standardize (convert data into some standard 128 format or type) may be provided in the subclass as "map_{dataset type}" 129 and "std_{dataset type}", respectively. 
131 If non-Exposure datasets cannot be retrieved using standard 132 daf_persistence methods alone, a "bypass_{dataset type}" function may be 133 provided in the subclass to return the dataset instead of using the 134 "datasets" subpolicy. 136 Implementations of map_camera and bypass_camera that should typically be 137 sufficient are provided in this base class. 143 - Handle defects the same was as all other calibration products, using the calibration registry 144 - Instead of auto-loading the camera at construction time, load it from the calibration registry 145 - Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention 146 of pyfits from this package. 152 MakeRawVisitInfoClass = MakeRawVisitInfo
155 PupilFactoryClass = afwCameraGeom.PupilFactory
157 def __init__(self, policy, repositoryDir,
158 root=None, registry=None, calibRoot=None, calibRegistry=None,
159 provided=None, parentRegistry=None, repositoryCfg=None):
160 """Initialize the CameraMapper. 164 policy : daf_persistence.Policy, 165 Can also be pexPolicy.Policy, only for backward compatibility. 166 Policy with per-camera defaults already merged. 167 repositoryDir : string 168 Policy repository for the subclassing module (obtained with 169 getRepositoryPath() on the per-camera default dictionary). 170 root : string, optional 171 Path to the root directory for data. 172 registry : string, optional 173 Path to registry with data's metadata. 174 calibRoot : string, optional 175 Root directory for calibrations. 176 calibRegistry : string, optional 177 Path to registry with calibrations' metadata. 178 provided : list of string, optional 179 Keys provided by the mapper. 180 parentRegistry : Registry subclass, optional 181 Registry from a parent repository that may be used to look up 183 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 184 The configuration information for the repository this mapper is 188 dafPersist.Mapper.__init__(self)
190 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
195 self.
root = repositoryCfg.root
198 if isinstance(policy, pexPolicy.Policy):
199 policy = dafPersist.Policy(policy)
201 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 202 if repoPolicy
is not None:
203 policy.update(repoPolicy)
205 defaultPolicyFile = dafPersist.Policy.defaultPolicyFile(
"obs_base",
206 "MapperDictionary.paf",
208 dictPolicy = dafPersist.Policy(defaultPolicyFile)
209 policy.merge(dictPolicy)
213 if 'levels' in policy:
214 levelsPolicy = policy[
'levels']
215 for key
in levelsPolicy.names(
True):
216 self.
levels[key] = set(levelsPolicy.asArray(key))
219 if 'defaultSubLevels' in policy:
225 root = dafPersist.LogicalLocation(root).locString()
235 if calibRoot
is not None:
236 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
237 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
240 calibRoot = policy.get(
'calibRoot',
None)
242 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
244 if calibStorage
is None:
252 posixIfNoSql=(
not parentRegistry))
255 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
256 if needCalibRegistry:
259 "calibRegistryPath", calibStorage,
263 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
264 "calibRoot ivar:%s or policy['calibRoot']:%s" %
265 (calibRoot, policy.get(
'calibRoot',
None)))
282 if 'defects' in policy:
283 self.
defectPath = os.path.join(repositoryDir, policy[
'defects'])
284 defectRegistryLocation = os.path.join(self.
defectPath,
"defectRegistry.sqlite3")
285 self.
defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
296 raise ValueError(
'class variable packageName must not be None')
300 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
301 """Initialize mappings 303 For each of the dataset types that we want to be able to read, there are 304 methods that can be created to support them: 305 * map_<dataset> : determine the path for dataset 306 * std_<dataset> : standardize the retrieved dataset 307 * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery) 308 * query_<dataset> : query the registry 310 Besides the dataset types explicitly listed in the policy, we create 311 additional, derived datasets for additional conveniences, e.g., reading 312 the header of an image, retrieving only the size of a catalog. 316 policy : `lsst.daf.persistence.Policy` 317 Policy with per-camera defaults already merged 318 rootStorage : `Storage subclass instance` 319 Interface to persisted repository data. 320 calibRoot : `Storage subclass instance` 321 Interface to persisted calib repository data 322 provided : `list` of `str` 323 Keys provided by the mapper 326 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
327 "obs_base",
"ImageMappingDictionary.paf",
"policy"))
328 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
329 "obs_base",
"ExposureMappingDictionary.paf",
"policy"))
330 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
331 "obs_base",
"CalibrationMappingDictionary.paf",
"policy"))
332 dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
333 "obs_base",
"DatasetMappingDictionary.paf",
"policy"))
337 (
"images", imgMappingPolicy, ImageMapping),
338 (
"exposures", expMappingPolicy, ExposureMapping),
339 (
"calibrations", calMappingPolicy, CalibrationMapping),
340 (
"datasets", dsMappingPolicy, DatasetMapping)
343 for name, defPolicy, cls
in mappingList:
345 datasets = policy[name]
348 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
349 if os.path.exists(defaultsPath):
350 datasets.merge(dafPersist.Policy(defaultsPath))
353 setattr(self, name, mappings)
354 for datasetType
in datasets.names(
True):
355 subPolicy = datasets[datasetType]
356 subPolicy.merge(defPolicy)
358 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
359 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
360 subPolicy=subPolicy):
361 components = subPolicy.get(
'composite')
362 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 363 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 364 python = subPolicy[
'python']
365 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
366 disassembler=disassembler,
370 for name, component
in components.items():
371 butlerComposite.add(id=name,
372 datasetType=component.get(
'datasetType'),
373 setter=component.get(
'setter',
None),
374 getter=component.get(
'getter',
None),
375 subset=component.get(
'subset',
False),
376 inputOnly=component.get(
'inputOnly',
False))
377 return butlerComposite
378 setattr(self,
"map_" + datasetType, compositeClosure)
382 if name ==
"calibrations":
384 provided=provided, dataRoot=rootStorage)
386 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
387 self.
keyDict.update(mapping.keys())
388 mappings[datasetType] = mapping
389 self.
mappings[datasetType] = mapping
390 if not hasattr(self,
"map_" + datasetType):
391 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
392 return mapping.map(mapper, dataId, write)
393 setattr(self,
"map_" + datasetType, mapClosure)
394 if not hasattr(self,
"query_" + datasetType):
395 def queryClosure(format, dataId, mapping=mapping):
396 return mapping.lookup(format, dataId)
397 setattr(self,
"query_" + datasetType, queryClosure)
398 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
399 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
400 return mapping.standardize(mapper, item, dataId)
401 setattr(self,
"std_" + datasetType, stdClosure)
403 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
404 """Set convenience methods on CameraMapper""" 405 mapName =
"map_" + datasetType +
"_" + suffix
406 bypassName =
"bypass_" + datasetType +
"_" + suffix
407 queryName =
"query_" + datasetType +
"_" + suffix
408 if not hasattr(self, mapName):
409 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
410 if not hasattr(self, bypassName):
411 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
412 bypassImpl = getattr(self,
"bypass_" + datasetType)
413 if bypassImpl
is not None:
414 setattr(self, bypassName, bypassImpl)
415 if not hasattr(self, queryName):
416 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
419 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
420 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
422 if subPolicy[
"storage"] ==
"FitsStorage":
423 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
424 readMetadata(location.getLocationsWithRoot()[0]))
427 addName =
"add_" + datasetType
428 if not hasattr(self, addName):
431 if name ==
"exposures":
432 setMethods(
"wcs", bypassImpl=
lambda datasetType, pythonType, location, dataId:
433 afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
434 setMethods(
"calib", bypassImpl=
lambda datasetType, pythonType, location, dataId:
435 afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
436 setMethods(
"visitInfo",
437 bypassImpl=
lambda datasetType, pythonType, location, dataId:
438 afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
440 bypassImpl=
lambda datasetType, pythonType, location, dataId:
441 afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
442 setMethods(
"detector",
443 mapImpl=
lambda dataId, write=
False:
444 dafPersist.ButlerLocation(
445 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
447 storageName=
"Internal",
448 locationList=
"ignored",
453 bypassImpl=
lambda datasetType, pythonType, location, dataId:
456 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
457 afwImage.bboxFromMetadata(
458 readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
460 elif name ==
"images":
461 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
462 afwImage.bboxFromMetadata(
463 readMetadata(location.getLocationsWithRoot()[0])))
465 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
466 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
467 readMetadata(os.path.join(location.getStorage().root,
468 location.getLocations()[0]), hdu=1))
471 if subPolicy[
"storage"] ==
"FitsStorage":
472 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
473 subId = dataId.copy()
475 loc = mapping.map(mapper, subId, write)
476 bbox = dataId[
'bbox']
477 llcX = bbox.getMinX()
478 llcY = bbox.getMinY()
479 width = bbox.getWidth()
480 height = bbox.getHeight()
481 loc.additionalData.set(
'llcX', llcX)
482 loc.additionalData.set(
'llcY', llcY)
483 loc.additionalData.set(
'width', width)
484 loc.additionalData.set(
'height', height)
485 if 'imageOrigin' in dataId:
486 loc.additionalData.set(
'imageOrigin',
487 dataId[
'imageOrigin'])
490 def querySubClosure(key, format, dataId, mapping=mapping):
491 subId = dataId.copy()
493 return mapping.lookup(format, subId)
494 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
496 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
498 setMethods(
"len", bypassImpl=
lambda datasetType, pythonType, location, dataId:
499 readMetadata(os.path.join(location.getStorage().root,
500 location.getLocations()[0]),
501 hdu=1).get(
"NAXIS2"))
504 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
505 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
506 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
507 location.getLocations()[0])))
509 def _computeCcdExposureId(self, dataId):
510 """Compute the 64-bit (long) identifier for a CCD exposure. 512 Subclasses must override 517 Data identifier with visit, ccd. 519 raise NotImplementedError()
521 def _computeCoaddExposureId(self, dataId, singleFilter):
522 """Compute the 64-bit (long) identifier for a coadd. 524 Subclasses must override 529 Data identifier with tract and patch. 530 singleFilter : `bool` 531 True means the desired ID is for a single-filter coadd, in which 532 case dataIdmust contain filter. 534 raise NotImplementedError()
536 def _search(self, path):
537 """Search for path in the associated repository's storage. 542 Path that describes an object in the repository associated with 544 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 545 indicator will be stripped when searching and so will match 546 filenames without the HDU indicator, e.g. 'foo.fits'. The path 547 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 552 The path for this object in the repository. Will return None if the 553 object can't be found. If the input argument path contained an HDU 554 indicator, the returned path will also contain the HDU indicator. 559 """Rename any existing object with the given type and dataId. 561 The CameraMapper implementation saves objects in a sequence of e.g.: 567 All of the backups will be placed in the output repo, however, and will 568 not be removed if they are found elsewhere in the _parent chain. This 569 means that the same file will be stored twice if the previous version was 570 found in an input repo. 579 def firstElement(list):
580 """Get the first element in the list, or None if that can't be done. 582 return list[0]
if list
is not None and len(list)
else None 585 newLocation = self.map(datasetType, dataId, write=
True)
586 newPath = newLocation.getLocations()[0]
587 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
588 path = firstElement(path)
590 while path
is not None:
592 oldPaths.append((n, path))
593 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
594 path = firstElement(path)
595 for n, oldPath
in reversed(oldPaths):
596 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
599 """Return supported keys. 604 List of keys usable in a dataset identifier 609 """Return a dict of supported keys and their value types for a given dataset 610 type at a given level of the key hierarchy. 615 Dataset type or None for all dataset types. 616 level : `str` or None 617 Level or None for all levels or '' for the default level for the 623 Keys are strings usable in a dataset identifier, values are their 631 if datasetType
is None:
632 keyDict = copy.copy(self.
keyDict)
635 if level
is not None and level
in self.
levels:
636 keyDict = copy.copy(keyDict)
637 for l
in self.
levels[level]:
652 """Return the name of the camera that this CameraMapper is for.""" 654 className = className[className.find(
'.'):-1]
655 m = re.search(
r'(\w+)Mapper', className)
657 m = re.search(
r"class '[\w.]*?(\w+)'", className)
659 return name[:1].lower() + name[1:]
if name
else '' 663 """Return the name of the package containing this CameraMapper.""" 665 raise ValueError(
'class variable packageName must not be None')
670 """Return the base directory of this package""" 674 """Map a camera dataset.""" 676 raise RuntimeError(
"No camera dataset available.")
678 return dafPersist.ButlerLocation(
679 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
681 storageName=
"ConfigStorage",
689 """Return the (preloaded) camera object. 692 raise RuntimeError(
"No camera dataset available.")
696 """Map defects dataset. 700 `lsst.daf.butler.ButlerLocation` 701 Minimal ButlerLocation containing just the locationList field 702 (just enough information that bypass_defects can use it). 705 if defectFitsPath
is None:
706 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
708 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
713 """Return a defect based on the butler location returned by map_defects 717 butlerLocation : `lsst.daf.persistence.ButlerLocation` 718 locationList = path to defects FITS file 720 Butler data ID; "ccd" must be set. 722 Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation 723 into an object, which is what we want for now, since that conversion is a bit tricky. 726 defectsFitsPath = butlerLocation.locationList[0]
727 with pyfits.open(defectsFitsPath)
as hduList:
728 for hdu
in hduList[1:]:
729 if hdu.header[
"name"] != detectorName:
733 for data
in hdu.data:
734 bbox = afwGeom.Box2I(
735 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
736 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
738 defectList.append(afwImage.DefectBase(bbox))
741 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
744 return dafPersist.ButlerLocation(
745 pythonType=
"lsst.obs.base.ExposureIdInfo",
747 storageName=
"Internal",
748 locationList=
"ignored",
755 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 756 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
757 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
761 """Disable standardization for bfKernel 763 bfKernel is a calibration product that is numpy array, 764 unlike other calibration products that are all images; 765 all calibration images are sent through _standardizeExposure 766 due to CalibrationMapping, but we don't want that to happen to bfKernel 771 """Standardize a raw dataset by converting it to an Exposure instead of an Image""" 773 trimmed=
False, setVisitInfo=
True)
776 """Map a sky policy.""" 777 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
778 "Internal",
None,
None, self,
782 """Standardize a sky policy by returning the one we use.""" 791 def _getCcdKeyVal(self, dataId):
792 """Return CCD key and value used to look a defect in the defect registry 794 The default implementation simply returns ("ccd", full detector name) 798 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
800 """Set up a registry (usually SQLite3), trying a number of possible 808 Description of registry (for log messages) 812 Policy that contains the registry name, used if path is None. 814 Key in policy for registry path. 815 storage : Storage subclass 816 Repository Storage to look in. 817 searchParents : bool, optional 818 True if the search for a registry should follow any Butler v1 820 posixIfNoSql : bool, optional 821 If an sqlite registry is not found, will create a posix registry if 826 lsst.daf.persistence.Registry 829 if path
is None and policyKey
in policy:
830 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
831 if os.path.isabs(path):
832 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
833 if not storage.exists(path):
834 newPath = storage.instanceSearch(path)
836 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 838 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
842 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
850 if path
and (path.startswith(root)):
851 path = path[len(root +
'/'):]
852 except AttributeError:
858 def search(filename, description):
859 """Search for file in storage 864 Filename to search for 866 Description of file, for error message. 870 path : `str` or `None` 871 Path to file, or None 873 result = storage.instanceSearch(filename)
876 self.
log.debug(
"Unable to locate %s: %s", description, filename)
881 path = search(
"%s.pgsql" % name,
"%s in root" % description)
883 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
885 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
888 if not storage.exists(path):
889 newPath = storage.instanceSearch(path)
890 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 891 if newPath
is not None:
893 localFileObj = storage.getLocalFile(path)
894 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
895 registry = dafPersist.Registry.create(localFileObj.name)
897 elif not registry
and posixIfNoSql:
899 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
900 registry = dafPersist.PosixRegistry(storage.root)
906 def _transformId(self, dataId):
907 """Generate a standard ID dict from a camera-specific ID dict. 909 Canonical keys include: 910 - amp: amplifier name 911 - ccd: CCD name (in LSST this is a combination of raft and sensor) 912 The default implementation returns a copy of its input. 917 Dataset identifier; this must not be modified 922 Transformed dataset identifier. 927 def _mapActualToPath(self, template, actualId):
928 """Convert a template path to an actual path, using the actual data 929 identifier. This implementation is usually sufficient but can be 930 overridden by the subclass. 947 return template % transformedId
948 except Exception
as e:
949 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
953 """Convert a CCD name to a form useful as a filename 955 The default implementation converts spaces to underscores. 957 return ccdName.replace(
" ",
"_")
959 def _extractDetectorName(self, dataId):
960 """Extract the detector (CCD) name from the dataset identifier. 962 The name in question is the detector name used by lsst.afw.cameraGeom. 974 raise NotImplementedError(
"No _extractDetectorName() function specified")
976 def _extractAmpId(self, dataId):
977 """Extract the amplifier identifer from a dataset identifier. 979 .. note:: Deprecated in 11_0 981 amplifier identifier has two parts: the detector name for the CCD 982 containing the amplifier and index of the amplifier in the detector. 996 return (trDataId[
"ccd"], int(trDataId[
'amp']))
998 def _setAmpDetector(self, item, dataId, trimmed=True):
999 """Set the detector object in an Exposure for an amplifier. 1001 Defects are also added to the Exposure based on the detector object. 1005 item : `lsst.afw.image.Exposure` 1006 Exposure to set the detector in. 1010 Should detector be marked as trimmed? (ignored) 1013 return self.
_setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1015 def _setCcdDetector(self, item, dataId, trimmed=True):
1016 """Set the detector object in an Exposure for a CCD. 1020 item : `lsst.afw.image.Exposure` 1021 Exposure to set the detector in. 1025 Should detector be marked as trimmed? (ignored) 1027 if item.getDetector()
is not None:
1031 detector = self.
camera[detectorName]
1032 item.setDetector(detector)
1034 def _setFilter(self, mapping, item, dataId):
1035 """Set the filter object in an Exposure. If the Exposure had a FILTER 1036 keyword, this was already processed during load. But if it didn't, 1037 use the filter from the registry. 1041 mapping : `lsst.obs.base.Mapping` 1042 Where to get the filter from. 1043 item : `lsst.afw.image.Exposure` 1044 Exposure to set the filter in. 1049 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1050 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1053 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1056 actualId = mapping.need([
'filter'], dataId)
1057 filterName = actualId[
'filter']
1059 filterName = self.
filters[filterName]
1060 item.setFilter(afwImage.Filter(filterName))
1063 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1064 trimmed=True, setVisitInfo=True):
1065 """Default standardization function for images. 1067 This sets the Detector from the camera geometry 1068 and optionally set the Fiter. In both cases this saves 1069 having to persist some data in each exposure (or image). 1073 mapping : `lsst.obs.base.Mapping` 1074 Where to get the values from. 1075 item : image-like object 1076 Can be any of lsst.afw.image.Exposure, 1077 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1078 or lsst.afw.image.MaskedImage 1083 Set filter? Ignored if item is already an exposure 1085 Should detector be marked as trimmed? 1086 setVisitInfo : `bool` 1087 Should Exposure have its VisitInfo filled out from the metadata? 1091 `lsst.afw.image.Exposure` 1092 The standardized Exposure. 1095 item =
exposureFromImage(item, dataId, mapper=self, logger=self.
log, setVisitInfo=setVisitInfo)
1096 except Exception
as e:
1097 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1100 if mapping.level.lower() ==
"amp":
1102 elif mapping.level.lower() ==
"ccd":
1110 def _defectLookup(self, dataId):
1111 """Find the defects for a given CCD. 1121 Path to the defects file or None if not available. 1126 raise RuntimeError(
"No registry for defect lookup")
1130 dataIdForLookup = {
'visit': dataId[
'visit']}
1132 rows = self.
registry.lookup((
'taiObs'), (
'raw_visit'), dataIdForLookup)
1135 assert len(rows) == 1
1141 (
"DATETIME(?)",
"DATETIME(validStart)",
"DATETIME(validEnd)"),
1143 if not rows
or len(rows) == 0:
1146 return os.path.join(self.
defectPath, rows[0][0])
1148 raise RuntimeError(
"Querying for defects (%s, %s) returns %d files: %s" %
1149 (ccdVal, taiObs, len(rows),
", ".join([_[0]
for _
in rows])))
1151 def _makeCamera(self, policy, repositoryDir):
1152 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry 1154 Also set self.cameraDataLocation, if relevant (else it can be left None). 1156 This implementation assumes that policy contains an entry "camera" that points to the 1157 subdirectory in this package of camera data; specifically, that subdirectory must contain: 1158 - a file named `camera.py` that contains persisted camera config 1159 - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath 1163 policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy` 1164 Policy with per-camera defaults already merged 1165 (PexPolicy only for backward compatibility). 1166 repositoryDir : `str` 1167 Policy repository for the subclassing module (obtained with 1168 getRepositoryPath() on the per-camera default dictionary). 1170 if isinstance(policy, pexPolicy.Policy):
1171 policy = dafPersist.Policy(pexPolicy=policy)
1172 if 'camera' not in policy:
1173 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1174 cameraDataSubdir = policy[
'camera']
1176 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1177 cameraConfig = afwCameraGeom.CameraConfig()
1180 return afwCameraGeom.makeCameraFromPath(
1181 cameraConfig=cameraConfig,
1182 ampInfoPath=ampInfoPath,
1188 """Get the registry used by this mapper. 1193 The registry used by this mapper for this mapper's repository. 1198 """Stuff image compression settings into a daf.base.PropertySet 1200 This goes into the ButlerLocation's "additionalData", which gets 1201 passed into the boost::persistence framework. 1206 Type of dataset for which to get the image compression settings. 1212 additionalData : `lsst.daf.base.PropertySet` 1213 Image compression settings. 1215 mapping = self.
mappings[datasetType]
1216 recipeName = mapping.recipe
1217 storageType = mapping.storage
1219 return dafBase.PropertySet()
1221 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1222 (datasetType, storageType, recipeName))
1223 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1224 seed = hash(tuple(dataId.items())) % 2**31
1225 for plane
in (
"image",
"mask",
"variance"):
1226 if recipe.exists(plane +
".scaling.seed")
and recipe.get(plane +
".scaling.seed") == 0:
1227 recipe.set(plane +
".scaling.seed", seed)
1230 def _initWriteRecipes(self):
1231 """Read the recipes for writing files 1233 These recipes are currently used for configuring FITS compression, 1234 but they could have wider uses for configuring different flavors 1235 of the storage types. A recipe is referred to by a symbolic name, 1236 which has associated settings. These settings are stored as a 1237 `PropertySet` so they can easily be passed down to the 1238 boost::persistence framework as the "additionalData" parameter. 1240 The list of recipes is written in YAML. A default recipe and 1241 some other convenient recipes are in obs_base/policy/writeRecipes.yaml 1242 and these may be overridden or supplemented by the individual obs_* 1243 packages' own policy/writeRecipes.yaml files. 1245 Recipes are grouped by the storage type. Currently, only the 1246 ``FitsStorage`` storage type uses recipes, which uses it to 1247 configure FITS image compression. 1249 Each ``FitsStorage`` recipe for FITS compression should define 1250 "image", "mask" and "variance" entries, each of which may contain 1251 "compression" and "scaling" entries. Defaults will be provided for 1252 any missing elements under "compression" and "scaling". 1254 The allowed entries under "compression" are: 1256 * algorithm (string): compression algorithm to use 1257 * rows (int): number of rows per tile (0 = entire dimension) 1258 * columns (int): number of columns per tile (0 = entire dimension) 1259 * quantizeLevel (float): cfitsio quantization level 1261 The allowed entries under "scaling" are: 1263 * algorithm (string): scaling algorithm to use 1264 * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64) 1265 * fuzz (bool): fuzz the values when quantising floating-point values? 
1266 * seed (long): seed for random number generator when fuzzing 1267 * maskPlanes (list of string): mask planes to ignore when doing statistics 1268 * quantizeLevel: divisor of the standard deviation for STDEV_* scaling 1269 * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE) 1270 * bscale: manually specified BSCALE (for MANUAL scaling) 1271 * bzero: manually specified BSCALE (for MANUAL scaling) 1273 A very simple example YAML recipe: 1279 algorithm: GZIP_SHUFFLE 1283 recipesFile = os.path.join(
getPackageDir(
"obs_base"),
"policy",
"writeRecipes.yaml")
1284 recipes = dafPersist.Policy(recipesFile)
1285 supplementsFile = os.path.join(self.
getPackageDir(),
"policy",
"writeRecipes.yaml")
1286 validationMenu = {
'FitsStorage': validateRecipeFitsStorage, }
1287 if os.path.exists(supplementsFile)
and supplementsFile != recipesFile:
1288 supplements = dafPersist.Policy(supplementsFile)
1290 for entry
in validationMenu:
1291 intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1293 raise RuntimeError(
"Recipes provided in %s section %s may not override those in %s: %s" %
1294 (supplementsFile, entry, recipesFile, intersection))
1295 recipes.update(supplements)
1298 for storageType
in recipes.names(
True):
1299 if "default" not in recipes[storageType]:
1300 raise RuntimeError(
"No 'default' recipe defined for storage type %s in %s" %
1301 (storageType, recipesFile))
1302 self.
_writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1306 """Generate an Exposure from an image-like object 1308 If the image is a DecoratedImage then also set its WCS and metadata 1309 (Image and MaskedImage are missing the necessary metadata 1310 and Exposure already has those set) 1314 image : Image-like object 1315 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or 1320 `lsst.afw.image.Exposure` 1321 Exposure containing input image. 1324 if isinstance(image, afwImage.MaskedImage):
1325 exposure = afwImage.makeExposure(image)
1326 elif isinstance(image, afwImage.DecoratedImage):
1327 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1328 metadata = image.getMetadata()
1330 wcs = afwGeom.makeSkyWcs(metadata, strip=
True)
1331 exposure.setWcs(wcs)
1332 except pexExcept.TypeError
as e:
1335 logger = lsstLog.Log.getLogger(
"CameraMapper")
1336 logger.warn(
"wcs set to None; insufficient information found in metadata to create a valid wcs: " 1339 exposure.setMetadata(metadata)
1340 elif isinstance(image, afwImage.Exposure):
1343 metadata = exposure.getMetadata()
1346 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1350 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1351 if metadata
is not None:
1354 logger = lsstLog.Log.getLogger(
"CameraMapper")
1355 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1357 exposureId = mapper._computeCcdExposureId(dataId)
1358 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1360 exposure.getInfo().setVisitInfo(visitInfo)
1366 """Validate recipes for FitsStorage 1368 The recipes are supplemented with default values where appropriate. 1370 TODO: replace this custom validation code with Cerberus (DM-11846) 1374 recipes : `lsst.daf.persistence.Policy` 1375 FitsStorage recipes to validate. 1379 validated : `lsst.daf.base.PropertySet` 1380 Validated FitsStorage recipe. 1385 If validation fails. 1389 compressionSchema = {
1390 "algorithm":
"NONE",
1393 "quantizeLevel": 0.0,
1396 "algorithm":
"NONE",
1398 "maskPlanes": [
"NO_DATA"],
1400 "quantizeLevel": 4.0,
1407 def checkUnrecognized(entry, allowed, description):
1408 """Check to see if the entry contains unrecognised keywords""" 1409 unrecognized = set(entry.keys()) - set(allowed)
1412 "Unrecognized entries when parsing image compression recipe %s: %s" %
1413 (description, unrecognized))
1416 for name
in recipes.names(
True):
1417 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1418 rr = dafBase.PropertySet()
1419 validated[name] = rr
1420 for plane
in (
"image",
"mask",
"variance"):
1421 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1422 name +
"->" + plane)
1424 for settings, schema
in ((
"compression", compressionSchema),
1425 (
"scaling", scalingSchema)):
1426 prefix = plane +
"." + settings
1427 if settings
not in recipes[name][plane]:
1429 rr.set(prefix +
"." + key, schema[key])
1431 entry = recipes[name][plane][settings]
1432 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1434 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1435 rr.set(prefix +
"." + key, value)
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def _defectLookup(self, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)
def _getCcdKeyVal(self, dataId)
Utility functions.