25 from astropy.io
import fits
28 import lsst.daf.persistence
as dafPersist
29 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
30 import lsst.daf.base
as dafBase
31 import lsst.afw.geom
as afwGeom
32 import lsst.afw.image
as afwImage
33 import lsst.afw.table
as afwTable
34 from lsst.afw.fits
import readMetadata
35 import lsst.afw.cameraGeom
as afwCameraGeom
36 import lsst.log
as lsstLog
37 import lsst.pex.policy
as pexPolicy
39 from .exposureIdInfo
import ExposureIdInfo
40 from .makeRawVisitInfo
import MakeRawVisitInfo
43 __all__ = [
"CameraMapper",
"exposureFromImage"]
48 """CameraMapper is a base class for mappers that handle images from a 49 camera and products derived from them. This provides an abstraction layer 50 between the data on disk and the code. 52 Public methods: keys, queryMetadata, getDatasetTypes, map, 53 canStandardize, standardize 55 Mappers for specific data sources (e.g., CFHT Megacam, LSST 56 simulations, etc.) should inherit this class. 58 The CameraMapper manages datasets within a "root" directory. Note that 59 writing to a dataset present in the input root will hide the existing 60 dataset but not overwrite it. See #2160 for design discussion. 62 A camera is assumed to consist of one or more rafts, each composed of 63 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 64 (amps). A camera is also assumed to have a camera geometry description 65 (CameraGeom object) as a policy file, a filter description (Filter class 66 static configuration) as another policy file, and an optional defects 67 description directory. 69 Information from the camera geometry and defects are inserted into all 70 Exposure objects returned. 72 The mapper uses one or two registries to retrieve metadata about the 73 images. The first is a registry of all raw exposures. This must contain 74 the time of the observation. One or more tables (or the equivalent) 75 within the registry are used to look up data identifier components that 76 are not specified by the user (e.g. filter) and to return results for 77 metadata queries. The second is an optional registry of all calibration 78 data. This should contain validity start and end entries for each 79 calibration dataset in the same timescale as the observation time. 81 Subclasses will typically set MakeRawVisitInfoClass: 83 MakeRawVisitInfoClass: a class variable that points to a subclass of 84 MakeRawVisitInfo, a functor that creates an 85 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
87 Subclasses must provide the following methods: 89 _extractDetectorName(self, dataId): returns the detector name for a CCD 90 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 91 a dataset identifier referring to that CCD or a subcomponent of it. 93 _computeCcdExposureId(self, dataId): see below 95 _computeCoaddExposureId(self, dataId, singleFilter): see below 97 Subclasses may also need to override the following methods: 99 _transformId(self, dataId): transformation of a data identifier 100 from colloquial usage (e.g., "ccdname") to proper/actual usage 101 (e.g., "ccd"), including making suitable for path expansion (e.g. removing 102 commas). The default implementation does nothing. Note that this 103 method should not modify its input parameter. 105 getShortCcdName(self, ccdName): a static method that returns a shortened 106 name suitable for use as a filename. The default version converts spaces 109 _getCcdKeyVal(self, dataId): return a CCD key and value 110 by which to look up defects in the defects registry. 111 The default value returns ("ccd", detector name) 113 _mapActualToPath(self, template, actualId): convert a template path to an 114 actual path, using the actual dataset identifier. 116 The mapper's behaviors are largely specified by the policy file. 117 See the MapperDictionary.paf for descriptions of the available items. 119 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 120 mappings (see Mappings class). 122 Common default mappings for all subclasses can be specified in the 123 "policy/{images,exposures,calibrations,datasets}.yaml" files. This 124 provides a simple way to add a product to all camera mappers. 126 Functions to map (provide a path to the data given a dataset 127 identifier dictionary) and standardize (convert data into some standard 128 format or type) may be provided in the subclass as "map_{dataset type}" 129 and "std_{dataset type}", respectively. 
131 If non-Exposure datasets cannot be retrieved using standard 132 daf_persistence methods alone, a "bypass_{dataset type}" function may be 133 provided in the subclass to return the dataset instead of using the 134 "datasets" subpolicy. 136 Implementations of map_camera and bypass_camera that should typically be 137 sufficient are provided in this base class. 143 - Handle defects the same was as all other calibration products, using the 145 - Instead of auto-loading the camera at construction time, load it from 146 the calibration registry 147 - Rewrite defects as AFW tables so we don't need astropy.io.fits to 148 unpersist them; then remove all mention of astropy.io.fits from this 155 MakeRawVisitInfoClass = MakeRawVisitInfo
158 PupilFactoryClass = afwCameraGeom.PupilFactory
160 def __init__(self, policy, repositoryDir,
161 root=None, registry=None, calibRoot=None, calibRegistry=None,
162 provided=None, parentRegistry=None, repositoryCfg=None):
163 """Initialize the CameraMapper. 167 policy : daf_persistence.Policy, 168 Can also be pexPolicy.Policy, only for backward compatibility. 169 Policy with per-camera defaults already merged. 170 repositoryDir : string 171 Policy repository for the subclassing module (obtained with 172 getRepositoryPath() on the per-camera default dictionary). 173 root : string, optional 174 Path to the root directory for data. 175 registry : string, optional 176 Path to registry with data's metadata. 177 calibRoot : string, optional 178 Root directory for calibrations. 179 calibRegistry : string, optional 180 Path to registry with calibrations' metadata. 181 provided : list of string, optional 182 Keys provided by the mapper. 183 parentRegistry : Registry subclass, optional 184 Registry from a parent repository that may be used to look up 186 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 187 The configuration information for the repository this mapper is 191 dafPersist.Mapper.__init__(self)
193 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
198 self.
root = repositoryCfg.root
201 if isinstance(policy, pexPolicy.Policy):
202 policy = dafPersist.Policy(policy)
204 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 205 if repoPolicy
is not None:
206 policy.update(repoPolicy)
208 defaultPolicyFile = dafPersist.Policy.defaultPolicyFile(
"obs_base",
209 "MapperDictionary.paf",
211 dictPolicy = dafPersist.Policy(defaultPolicyFile)
212 policy.merge(dictPolicy)
216 if 'levels' in policy:
217 levelsPolicy = policy[
'levels']
218 for key
in levelsPolicy.names(
True):
219 self.
levels[key] = set(levelsPolicy.asArray(key))
222 if 'defaultSubLevels' in policy:
228 root = dafPersist.LogicalLocation(root).locString()
238 if calibRoot
is not None:
239 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
240 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
243 calibRoot = policy.get(
'calibRoot',
None)
245 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
247 if calibStorage
is None:
255 posixIfNoSql=(
not parentRegistry))
258 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
259 if needCalibRegistry:
262 "calibRegistryPath", calibStorage,
266 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
267 "calibRoot ivar:%s or policy['calibRoot']:%s" %
268 (calibRoot, policy.get(
'calibRoot',
None)))
285 if 'defects' in policy:
286 self.
defectPath = os.path.join(repositoryDir, policy[
'defects'])
287 defectRegistryLocation = os.path.join(self.
defectPath,
"defectRegistry.sqlite3")
288 self.
defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
296 raise ValueError(
'class variable packageName must not be None')
300 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
301 """Initialize mappings 303 For each of the dataset types that we want to be able to read, there 304 are methods that can be created to support them: 305 * map_<dataset> : determine the path for dataset 306 * std_<dataset> : standardize the retrieved dataset 307 * bypass_<dataset> : retrieve the dataset (bypassing the usual 309 * query_<dataset> : query the registry 311 Besides the dataset types explicitly listed in the policy, we create 312 additional, derived datasets for additional conveniences, 313 e.g., reading the header of an image, retrieving only the size of a 318 policy : `lsst.daf.persistence.Policy` 319 Policy with per-camera defaults already merged 320 rootStorage : `Storage subclass instance` 321 Interface to persisted repository data. 322 calibRoot : `Storage subclass instance` 323 Interface to persisted calib repository data 324 provided : `list` of `str` 325 Keys provided by the mapper 328 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
329 "obs_base",
"ImageMappingDictionary.paf",
"policy"))
330 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
331 "obs_base",
"ExposureMappingDictionary.paf",
"policy"))
332 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
333 "obs_base",
"CalibrationMappingDictionary.paf",
"policy"))
334 dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
335 "obs_base",
"DatasetMappingDictionary.paf",
"policy"))
339 (
"images", imgMappingPolicy, ImageMapping),
340 (
"exposures", expMappingPolicy, ExposureMapping),
341 (
"calibrations", calMappingPolicy, CalibrationMapping),
342 (
"datasets", dsMappingPolicy, DatasetMapping)
345 for name, defPolicy, cls
in mappingList:
347 datasets = policy[name]
350 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
351 if os.path.exists(defaultsPath):
352 datasets.merge(dafPersist.Policy(defaultsPath))
355 setattr(self, name, mappings)
356 for datasetType
in datasets.names(
True):
357 subPolicy = datasets[datasetType]
358 subPolicy.merge(defPolicy)
360 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
361 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
362 subPolicy=subPolicy):
363 components = subPolicy.get(
'composite')
364 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 365 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 366 python = subPolicy[
'python']
367 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
368 disassembler=disassembler,
372 for name, component
in components.items():
373 butlerComposite.add(id=name,
374 datasetType=component.get(
'datasetType'),
375 setter=component.get(
'setter',
None),
376 getter=component.get(
'getter',
None),
377 subset=component.get(
'subset',
False),
378 inputOnly=component.get(
'inputOnly',
False))
379 return butlerComposite
380 setattr(self,
"map_" + datasetType, compositeClosure)
384 if name ==
"calibrations":
386 provided=provided, dataRoot=rootStorage)
388 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
389 self.
keyDict.update(mapping.keys())
390 mappings[datasetType] = mapping
391 self.
mappings[datasetType] = mapping
392 if not hasattr(self,
"map_" + datasetType):
393 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
394 return mapping.map(mapper, dataId, write)
395 setattr(self,
"map_" + datasetType, mapClosure)
396 if not hasattr(self,
"query_" + datasetType):
397 def queryClosure(format, dataId, mapping=mapping):
398 return mapping.lookup(format, dataId)
399 setattr(self,
"query_" + datasetType, queryClosure)
400 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
401 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
402 return mapping.standardize(mapper, item, dataId)
403 setattr(self,
"std_" + datasetType, stdClosure)
405 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
406 """Set convenience methods on CameraMapper""" 407 mapName =
"map_" + datasetType +
"_" + suffix
408 bypassName =
"bypass_" + datasetType +
"_" + suffix
409 queryName =
"query_" + datasetType +
"_" + suffix
410 if not hasattr(self, mapName):
411 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
412 if not hasattr(self, bypassName):
413 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
414 bypassImpl = getattr(self,
"bypass_" + datasetType)
415 if bypassImpl
is not None:
416 setattr(self, bypassName, bypassImpl)
417 if not hasattr(self, queryName):
418 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
421 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
422 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
424 if subPolicy[
"storage"] ==
"FitsStorage":
425 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
426 readMetadata(location.getLocationsWithRoot()[0]))
429 addName =
"add_" + datasetType
430 if not hasattr(self, addName):
433 if name ==
"exposures":
434 setMethods(
"wcs", bypassImpl=
lambda datasetType, pythonType, location, dataId:
435 afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
436 setMethods(
"calib", bypassImpl=
lambda datasetType, pythonType, location, dataId:
437 afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
438 setMethods(
"visitInfo",
439 bypassImpl=
lambda datasetType, pythonType, location, dataId:
440 afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
442 bypassImpl=
lambda datasetType, pythonType, location, dataId:
443 afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
444 setMethods(
"detector",
445 mapImpl=
lambda dataId, write=
False:
446 dafPersist.ButlerLocation(
447 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
449 storageName=
"Internal",
450 locationList=
"ignored",
455 bypassImpl=
lambda datasetType, pythonType, location, dataId:
458 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
459 afwImage.bboxFromMetadata(
460 readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
462 elif name ==
"images":
463 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
464 afwImage.bboxFromMetadata(
465 readMetadata(location.getLocationsWithRoot()[0])))
467 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
468 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
469 readMetadata(os.path.join(location.getStorage().root,
470 location.getLocations()[0]), hdu=1))
473 if subPolicy[
"storage"] ==
"FitsStorage":
474 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
475 subId = dataId.copy()
477 loc = mapping.map(mapper, subId, write)
478 bbox = dataId[
'bbox']
479 llcX = bbox.getMinX()
480 llcY = bbox.getMinY()
481 width = bbox.getWidth()
482 height = bbox.getHeight()
483 loc.additionalData.set(
'llcX', llcX)
484 loc.additionalData.set(
'llcY', llcY)
485 loc.additionalData.set(
'width', width)
486 loc.additionalData.set(
'height', height)
487 if 'imageOrigin' in dataId:
488 loc.additionalData.set(
'imageOrigin',
489 dataId[
'imageOrigin'])
492 def querySubClosure(key, format, dataId, mapping=mapping):
493 subId = dataId.copy()
495 return mapping.lookup(format, subId)
496 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
498 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
500 setMethods(
"len", bypassImpl=
lambda datasetType, pythonType, location, dataId:
501 readMetadata(os.path.join(location.getStorage().root,
502 location.getLocations()[0]),
503 hdu=1).getScalar(
"NAXIS2"))
506 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
507 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
508 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
509 location.getLocations()[0])))
511 def _computeCcdExposureId(self, dataId):
512 """Compute the 64-bit (long) identifier for a CCD exposure. 514 Subclasses must override 519 Data identifier with visit, ccd. 521 raise NotImplementedError()
523 def _computeCoaddExposureId(self, dataId, singleFilter):
524 """Compute the 64-bit (long) identifier for a coadd. 526 Subclasses must override 531 Data identifier with tract and patch. 532 singleFilter : `bool` 533 True means the desired ID is for a single-filter coadd, in which 534 case dataIdmust contain filter. 536 raise NotImplementedError()
538 def _search(self, path):
539 """Search for path in the associated repository's storage. 544 Path that describes an object in the repository associated with 546 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 547 indicator will be stripped when searching and so will match 548 filenames without the HDU indicator, e.g. 'foo.fits'. The path 549 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 554 The path for this object in the repository. Will return None if the 555 object can't be found. If the input argument path contained an HDU 556 indicator, the returned path will also contain the HDU indicator. 561 """Rename any existing object with the given type and dataId. 563 The CameraMapper implementation saves objects in a sequence of e.g.: 569 All of the backups will be placed in the output repo, however, and will 570 not be removed if they are found elsewhere in the _parent chain. This 571 means that the same file will be stored twice if the previous version 572 was found in an input repo. 581 def firstElement(list):
582 """Get the first element in the list, or None if that can't be 585 return list[0]
if list
is not None and len(list)
else None 588 newLocation = self.map(datasetType, dataId, write=
True)
589 newPath = newLocation.getLocations()[0]
590 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
591 path = firstElement(path)
593 while path
is not None:
595 oldPaths.append((n, path))
596 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
597 path = firstElement(path)
598 for n, oldPath
in reversed(oldPaths):
599 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
602 """Return supported keys. 607 List of keys usable in a dataset identifier 612 """Return a dict of supported keys and their value types for a given 613 dataset type at a given level of the key hierarchy. 618 Dataset type or None for all dataset types. 619 level : `str` or None 620 Level or None for all levels or '' for the default level for the 626 Keys are strings usable in a dataset identifier, values are their 634 if datasetType
is None:
635 keyDict = copy.copy(self.
keyDict)
638 if level
is not None and level
in self.
levels:
639 keyDict = copy.copy(keyDict)
640 for l
in self.
levels[level]:
655 """Return the name of the camera that this CameraMapper is for.""" 657 className = className[className.find(
'.'):-1]
658 m = re.search(
r'(\w+)Mapper', className)
660 m = re.search(
r"class '[\w.]*?(\w+)'", className)
662 return name[:1].lower() + name[1:]
if name
else '' 666 """Return the name of the package containing this CameraMapper.""" 668 raise ValueError(
'class variable packageName must not be None')
673 """Return the base directory of this package""" 677 """Map a camera dataset.""" 679 raise RuntimeError(
"No camera dataset available.")
681 return dafPersist.ButlerLocation(
682 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
684 storageName=
"ConfigStorage",
692 """Return the (preloaded) camera object. 695 raise RuntimeError(
"No camera dataset available.")
699 """Map defects dataset. 703 `lsst.daf.butler.ButlerLocation` 704 Minimal ButlerLocation containing just the locationList field 705 (just enough information that bypass_defects can use it). 708 if defectFitsPath
is None:
709 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
711 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
716 """Return a defect based on the butler location returned by map_defects 720 butlerLocation : `lsst.daf.persistence.ButlerLocation` 721 locationList = path to defects FITS file 723 Butler data ID; "ccd" must be set. 725 Note: the name "bypass_XXX" means the butler makes no attempt to 726 convert the ButlerLocation into an object, which is what we want for 727 now, since that conversion is a bit tricky. 730 defectsFitsPath = butlerLocation.locationList[0]
731 with fits.open(defectsFitsPath)
as hduList:
732 for hdu
in hduList[1:]:
733 if hdu.header[
"name"] != detectorName:
737 for data
in hdu.data:
738 bbox = afwGeom.Box2I(
739 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
740 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
742 defectList.append(afwImage.DefectBase(bbox))
745 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
748 return dafPersist.ButlerLocation(
749 pythonType=
"lsst.obs.base.ExposureIdInfo",
751 storageName=
"Internal",
752 locationList=
"ignored",
759 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 760 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
761 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
765 """Disable standardization for bfKernel 767 bfKernel is a calibration product that is numpy array, 768 unlike other calibration products that are all images; 769 all calibration images are sent through _standardizeExposure 770 due to CalibrationMapping, but we don't want that to happen to bfKernel 775 """Standardize a raw dataset by converting it to an Exposure instead 778 trimmed=
False, setVisitInfo=
True)
781 """Map a sky policy.""" 782 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
783 "Internal",
None,
None, self,
787 """Standardize a sky policy by returning the one we use.""" 788 return self.skypolicy
796 def _getCcdKeyVal(self, dataId):
797 """Return CCD key and value used to look a defect in the defect 800 The default implementation simply returns ("ccd", full detector name) 804 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
806 """Set up a registry (usually SQLite3), trying a number of possible 814 Description of registry (for log messages) 818 Policy that contains the registry name, used if path is None. 820 Key in policy for registry path. 821 storage : Storage subclass 822 Repository Storage to look in. 823 searchParents : bool, optional 824 True if the search for a registry should follow any Butler v1 826 posixIfNoSql : bool, optional 827 If an sqlite registry is not found, will create a posix registry if 832 lsst.daf.persistence.Registry 835 if path
is None and policyKey
in policy:
836 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
837 if os.path.isabs(path):
838 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
839 if not storage.exists(path):
840 newPath = storage.instanceSearch(path)
842 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 844 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
848 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
856 if path
and (path.startswith(root)):
857 path = path[len(root +
'/'):]
858 except AttributeError:
864 def search(filename, description):
865 """Search for file in storage 870 Filename to search for 872 Description of file, for error message. 876 path : `str` or `None` 877 Path to file, or None 879 result = storage.instanceSearch(filename)
882 self.
log.debug(
"Unable to locate %s: %s", description, filename)
887 path = search(
"%s.pgsql" % name,
"%s in root" % description)
889 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
891 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
894 if not storage.exists(path):
895 newPath = storage.instanceSearch(path)
896 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 897 if newPath
is not None:
899 localFileObj = storage.getLocalFile(path)
900 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
901 registry = dafPersist.Registry.create(localFileObj.name)
903 elif not registry
and posixIfNoSql:
905 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
906 registry = dafPersist.PosixRegistry(storage.root)
912 def _transformId(self, dataId):
913 """Generate a standard ID dict from a camera-specific ID dict. 915 Canonical keys include: 916 - amp: amplifier name 917 - ccd: CCD name (in LSST this is a combination of raft and sensor) 918 The default implementation returns a copy of its input. 923 Dataset identifier; this must not be modified 928 Transformed dataset identifier. 933 def _mapActualToPath(self, template, actualId):
934 """Convert a template path to an actual path, using the actual data 935 identifier. This implementation is usually sufficient but can be 936 overridden by the subclass. 953 return template % transformedId
954 except Exception
as e:
955 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
959 """Convert a CCD name to a form useful as a filename 961 The default implementation converts spaces to underscores. 963 return ccdName.replace(
" ",
"_")
965 def _extractDetectorName(self, dataId):
966 """Extract the detector (CCD) name from the dataset identifier. 968 The name in question is the detector name used by lsst.afw.cameraGeom. 980 raise NotImplementedError(
"No _extractDetectorName() function specified")
982 def _extractAmpId(self, dataId):
983 """Extract the amplifier identifer from a dataset identifier. 985 .. note:: Deprecated in 11_0 987 amplifier identifier has two parts: the detector name for the CCD 988 containing the amplifier and index of the amplifier in the detector. 1002 return (trDataId[
"ccd"], int(trDataId[
'amp']))
1004 def _setAmpDetector(self, item, dataId, trimmed=True):
1005 """Set the detector object in an Exposure for an amplifier. 1007 Defects are also added to the Exposure based on the detector object. 1011 item : `lsst.afw.image.Exposure` 1012 Exposure to set the detector in. 1016 Should detector be marked as trimmed? (ignored) 1019 return self.
_setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1021 def _setCcdDetector(self, item, dataId, trimmed=True):
1022 """Set the detector object in an Exposure for a CCD. 1026 item : `lsst.afw.image.Exposure` 1027 Exposure to set the detector in. 1031 Should detector be marked as trimmed? (ignored) 1033 if item.getDetector()
is not None:
1037 detector = self.
camera[detectorName]
1038 item.setDetector(detector)
1040 def _setFilter(self, mapping, item, dataId):
1041 """Set the filter object in an Exposure. If the Exposure had a FILTER 1042 keyword, this was already processed during load. But if it didn't, 1043 use the filter from the registry. 1047 mapping : `lsst.obs.base.Mapping` 1048 Where to get the filter from. 1049 item : `lsst.afw.image.Exposure` 1050 Exposure to set the filter in. 1055 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1056 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1059 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1062 actualId = mapping.need([
'filter'], dataId)
1063 filterName = actualId[
'filter']
1065 filterName = self.
filters[filterName]
1066 item.setFilter(afwImage.Filter(filterName))
1069 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1070 trimmed=True, setVisitInfo=True):
1071 """Default standardization function for images. 1073 This sets the Detector from the camera geometry 1074 and optionally set the Fiter. In both cases this saves 1075 having to persist some data in each exposure (or image). 1079 mapping : `lsst.obs.base.Mapping` 1080 Where to get the values from. 1081 item : image-like object 1082 Can be any of lsst.afw.image.Exposure, 1083 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1084 or lsst.afw.image.MaskedImage 1089 Set filter? Ignored if item is already an exposure 1091 Should detector be marked as trimmed? 1092 setVisitInfo : `bool` 1093 Should Exposure have its VisitInfo filled out from the metadata? 1097 `lsst.afw.image.Exposure` 1098 The standardized Exposure. 1101 item =
exposureFromImage(item, dataId, mapper=self, logger=self.
log, setVisitInfo=setVisitInfo)
1102 except Exception
as e:
1103 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1106 if mapping.level.lower() ==
"amp":
1108 elif mapping.level.lower() ==
"ccd":
1116 def _defectLookup(self, dataId):
1117 """Find the defects for a given CCD. 1127 Path to the defects file or None if not available. 1132 raise RuntimeError(
"No registry for defect lookup")
1136 dataIdForLookup = {
'visit': dataId[
'visit']}
1138 rows = self.
registry.lookup((
'taiObs'), (
'raw_visit'), dataIdForLookup)
1141 assert len(rows) == 1
1147 (
"DATETIME(?)",
"DATETIME(validStart)",
"DATETIME(validEnd)"),
1149 if not rows
or len(rows) == 0:
1152 return os.path.join(self.
defectPath, rows[0][0])
1154 raise RuntimeError(
"Querying for defects (%s, %s) returns %d files: %s" %
1155 (ccdVal, taiObs, len(rows),
", ".join([_[0]
for _
in rows])))
1157 def _makeCamera(self, policy, repositoryDir):
1158 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing 1161 Also set self.cameraDataLocation, if relevant (else it can be left 1164 This implementation assumes that policy contains an entry "camera" 1165 that points to the subdirectory in this package of camera data; 1166 specifically, that subdirectory must contain: 1167 - a file named `camera.py` that contains persisted camera config 1168 - ampInfo table FITS files, as required by 1169 lsst.afw.cameraGeom.makeCameraFromPath 1173 policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy` 1174 Policy with per-camera defaults already merged 1175 (PexPolicy only for backward compatibility). 1176 repositoryDir : `str` 1177 Policy repository for the subclassing module (obtained with 1178 getRepositoryPath() on the per-camera default dictionary). 1180 if isinstance(policy, pexPolicy.Policy):
1181 policy = dafPersist.Policy(pexPolicy=policy)
1182 if 'camera' not in policy:
1183 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1184 cameraDataSubdir = policy[
'camera']
1186 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1187 cameraConfig = afwCameraGeom.CameraConfig()
1190 return afwCameraGeom.makeCameraFromPath(
1191 cameraConfig=cameraConfig,
1192 ampInfoPath=ampInfoPath,
1198 """Get the registry used by this mapper. 1203 The registry used by this mapper for this mapper's repository. 1208 """Stuff image compression settings into a daf.base.PropertySet 1210 This goes into the ButlerLocation's "additionalData", which gets 1211 passed into the boost::persistence framework. 1216 Type of dataset for which to get the image compression settings. 1222 additionalData : `lsst.daf.base.PropertySet` 1223 Image compression settings. 1225 mapping = self.
mappings[datasetType]
1226 recipeName = mapping.recipe
1227 storageType = mapping.storage
1229 return dafBase.PropertySet()
1231 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1232 (datasetType, storageType, recipeName))
1233 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1234 seed = hash(tuple(dataId.items())) % 2**31
1235 for plane
in (
"image",
"mask",
"variance"):
1236 if recipe.exists(plane +
".scaling.seed")
and recipe.getScalar(plane +
".scaling.seed") == 0:
1237 recipe.set(plane +
".scaling.seed", seed)
def _initWriteRecipes(self):
    """Read the recipes for writing files.

    These recipes are currently used for configuring FITS compression,
    but they could have wider uses for configuring different flavors
    of the storage types. A recipe is referred to by a symbolic name,
    which has associated settings. These settings are stored as a
    `PropertySet` so they can easily be passed down to the
    boost::persistence framework as the "additionalData" parameter.

    The list of recipes is written in YAML. A default recipe and
    some other convenient recipes are in obs_base/policy/writeRecipes.yaml
    and these may be supplemented (but NOT overridden) by the individual
    obs_* packages' own policy/writeRecipes.yaml files.

    Recipes are grouped by the storage type. Currently, only the
    ``FitsStorage`` storage type uses recipes, which uses it to
    configure FITS image compression. Each ``FitsStorage`` recipe may
    contain "image", "mask" and "variance" entries with "compression"
    and "scaling" sub-entries; defaults are supplied for anything missing
    (see `validateRecipeFitsStorage`).

    Raises
    ------
    RuntimeError
        If a camera package tries to override an obs_base recipe, or a
        storage type has no "default" recipe.
    """
    recipesFile = os.path.join(getPackageDir("obs_base"), "policy", "writeRecipes.yaml")
    recipes = dafPersist.Policy(recipesFile)
    supplementsFile = os.path.join(self.getPackageDir(), "policy", "writeRecipes.yaml")
    validationMenu = {'FitsStorage': validateRecipeFitsStorage, }
    if os.path.exists(supplementsFile) and supplementsFile != recipesFile:
        supplements = dafPersist.Policy(supplementsFile)
        # Camera packages may only ADD recipes; overriding an obs_base
        # recipe of the same name is an error.
        for entry in validationMenu:
            intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
            if intersection:
                raise RuntimeError("Recipes provided in %s section %s may not override those in %s: %s" %
                                   (supplementsFile, entry, recipesFile, intersection))
        recipes.update(supplements)

    self._writeRecipes = {}
    for storageType in recipes.names(True):
        if "default" not in recipes[storageType]:
            raise RuntimeError("No 'default' recipe defined for storage type %s in %s" %
                               (storageType, recipesFile))
        # Validate each group and store the supplemented PropertySets.
        self._writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
1318 """Generate an Exposure from an image-like object 1320 If the image is a DecoratedImage then also set its WCS and metadata 1321 (Image and MaskedImage are missing the necessary metadata 1322 and Exposure already has those set) 1326 image : Image-like object 1327 Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or 1332 `lsst.afw.image.Exposure` 1333 Exposure containing input image. 1336 if isinstance(image, afwImage.MaskedImage):
1337 exposure = afwImage.makeExposure(image)
1338 elif isinstance(image, afwImage.DecoratedImage):
1339 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
1340 metadata = image.getMetadata()
1342 wcs = afwGeom.makeSkyWcs(metadata, strip=
True)
1343 exposure.setWcs(wcs)
1344 except pexExcept.TypeError
as e:
1347 logger = lsstLog.Log.getLogger(
"CameraMapper")
1348 logger.debug(
"wcs set to None; insufficient information found in metadata to create a valid wcs:" 1351 exposure.setMetadata(metadata)
1352 elif isinstance(image, afwImage.Exposure):
1355 metadata = exposure.getMetadata()
1358 exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
1362 if setVisitInfo
and exposure.getInfo().getVisitInfo()
is None:
1363 if metadata
is not None:
1366 logger = lsstLog.Log.getLogger(
"CameraMapper")
1367 logger.warn(
"I can only set the VisitInfo if you provide a mapper")
1369 exposureId = mapper._computeCcdExposureId(dataId)
1370 visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
1372 exposure.getInfo().setVisitInfo(visitInfo)
1378 """Validate recipes for FitsStorage 1380 The recipes are supplemented with default values where appropriate. 1382 TODO: replace this custom validation code with Cerberus (DM-11846) 1386 recipes : `lsst.daf.persistence.Policy` 1387 FitsStorage recipes to validate. 1391 validated : `lsst.daf.base.PropertySet` 1392 Validated FitsStorage recipe. 1397 If validation fails. 1401 compressionSchema = {
1402 "algorithm":
"NONE",
1405 "quantizeLevel": 0.0,
1408 "algorithm":
"NONE",
1410 "maskPlanes": [
"NO_DATA"],
1412 "quantizeLevel": 4.0,
1419 def checkUnrecognized(entry, allowed, description):
1420 """Check to see if the entry contains unrecognised keywords""" 1421 unrecognized = set(entry.keys()) - set(allowed)
1424 "Unrecognized entries when parsing image compression recipe %s: %s" %
1425 (description, unrecognized))
1428 for name
in recipes.names(
True):
1429 checkUnrecognized(recipes[name], [
"image",
"mask",
"variance"], name)
1430 rr = dafBase.PropertySet()
1431 validated[name] = rr
1432 for plane
in (
"image",
"mask",
"variance"):
1433 checkUnrecognized(recipes[name][plane], [
"compression",
"scaling"],
1434 name +
"->" + plane)
1436 for settings, schema
in ((
"compression", compressionSchema),
1437 (
"scaling", scalingSchema)):
1438 prefix = plane +
"." + settings
1439 if settings
not in recipes[name][plane]:
1441 rr.set(prefix +
"." + key, schema[key])
1443 entry = recipes[name][plane][settings]
1444 checkUnrecognized(entry, schema.keys(), name +
"->" + plane +
"->" + settings)
1446 value = type(schema[key])(entry[key])
if key
in entry
else schema[key]
1447 rr.set(prefix +
"." + key, value)
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def _defectLookup(self, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)
def _getCcdKeyVal(self, dataId)
Utility functions.