23 from builtins
import str
29 import lsst.daf.persistence
as dafPersist
30 from .
import ImageMapping, ExposureMapping, CalibrationMapping, DatasetMapping
31 import lsst.daf.base
as dafBase
32 import lsst.afw.geom
as afwGeom
33 import lsst.afw.image
as afwImage
34 import lsst.afw.table
as afwTable
35 from lsst.afw.fits
import readMetadata
36 import lsst.afw.cameraGeom
as afwCameraGeom
37 import lsst.log
as lsstLog
38 import lsst.pex.policy
as pexPolicy
40 from .exposureIdInfo
import ExposureIdInfo
41 from .makeRawVisitInfo
import MakeRawVisitInfo
44 __all__ = [
"CameraMapper",
"exposureFromImage"]
49 """CameraMapper is a base class for mappers that handle images from a 50 camera and products derived from them. This provides an abstraction layer 51 between the data on disk and the code. 53 Public methods: keys, queryMetadata, getDatasetTypes, map, 54 canStandardize, standardize 56 Mappers for specific data sources (e.g., CFHT Megacam, LSST 57 simulations, etc.) should inherit this class. 59 The CameraMapper manages datasets within a "root" directory. Note that 60 writing to a dataset present in the input root will hide the existing 61 dataset but not overwrite it. See #2160 for design discussion. 63 A camera is assumed to consist of one or more rafts, each composed of 64 multiple CCDs. Each CCD is in turn composed of one or more amplifiers 65 (amps). A camera is also assumed to have a camera geometry description 66 (CameraGeom object) as a policy file, a filter description (Filter class 67 static configuration) as another policy file, and an optional defects 68 description directory. 70 Information from the camera geometry and defects are inserted into all 71 Exposure objects returned. 73 The mapper uses one or two registries to retrieve metadata about the 74 images. The first is a registry of all raw exposures. This must contain 75 the time of the observation. One or more tables (or the equivalent) 76 within the registry are used to look up data identifier components that 77 are not specified by the user (e.g. filter) and to return results for 78 metadata queries. The second is an optional registry of all calibration 79 data. This should contain validity start and end entries for each 80 calibration dataset in the same timescale as the observation time. 82 Subclasses will typically set MakeRawVisitInfoClass: 84 MakeRawVisitInfoClass: a class variable that points to a subclass of 85 MakeRawVisitInfo, a functor that creates an 86 lsst.afw.image.VisitInfo from the FITS metadata of a raw image. 
88 Subclasses must provide the following methods: 90 _extractDetectorName(self, dataId): returns the detector name for a CCD 91 (e.g., "CFHT 21", "R:1,2 S:3,4") as used in the AFW CameraGeom class given 92 a dataset identifier referring to that CCD or a subcomponent of it. 94 _computeCcdExposureId(self, dataId): see below 96 _computeCoaddExposureId(self, dataId, singleFilter): see below 98 Subclasses may also need to override the following methods: 100 _transformId(self, dataId): transformation of a data identifier 101 from colloquial usage (e.g., "ccdname") to proper/actual usage (e.g., "ccd"), 102 including making suitable for path expansion (e.g. removing commas). 103 The default implementation does nothing. Note that this 104 method should not modify its input parameter. 106 getShortCcdName(self, ccdName): a static method that returns a shortened name 107 suitable for use as a filename. The default version converts spaces to underscores. 109 _getCcdKeyVal(self, dataId): return a CCD key and value 110 by which to look up defects in the defects registry. 111 The default value returns ("ccd", detector name) 113 _mapActualToPath(self, template, actualId): convert a template path to an 114 actual path, using the actual dataset identifier. 116 The mapper's behaviors are largely specified by the policy file. 117 See the MapperDictionary.paf for descriptions of the available items. 119 The 'exposures', 'calibrations', and 'datasets' subpolicies configure 120 mappings (see Mappings class). 122 Common default mappings for all subclasses can be specified in the 123 "policy/{images,exposures,calibrations,datasets}.yaml" files. This provides 124 a simple way to add a product to all camera mappers. 126 Functions to map (provide a path to the data given a dataset 127 identifier dictionary) and standardize (convert data into some standard 128 format or type) may be provided in the subclass as "map_{dataset type}" 129 and "std_{dataset type}", respectively. 
131 If non-Exposure datasets cannot be retrieved using standard 132 daf_persistence methods alone, a "bypass_{dataset type}" function may be 133 provided in the subclass to return the dataset instead of using the 134 "datasets" subpolicy. 136 Implementations of map_camera and bypass_camera that should typically be 137 sufficient are provided in this base class. 143 - Handle defects the same way as all other calibration products, using the calibration registry 144 - Instead of auto-loading the camera at construction time, load it from the calibration registry 145 - Rewrite defects as AFW tables so we don't need pyfits to unpersist them; then remove all mention 146 of pyfits from this package. 152 MakeRawVisitInfoClass = MakeRawVisitInfo
155 PupilFactoryClass = afwCameraGeom.PupilFactory
157 def __init__(self, policy, repositoryDir,
158 root=None, registry=None, calibRoot=None, calibRegistry=None,
159 provided=None, parentRegistry=None, repositoryCfg=None):
160 """Initialize the CameraMapper. 164 policy : daf_persistence.Policy, 165 Can also be pexPolicy.Policy, only for backward compatibility. 166 Policy with per-camera defaults already merged. 167 repositoryDir : string 168 Policy repository for the subclassing module (obtained with 169 getRepositoryPath() on the per-camera default dictionary). 170 root : string, optional 171 Path to the root directory for data. 172 registry : string, optional 173 Path to registry with data's metadata. 174 calibRoot : string, optional 175 Root directory for calibrations. 176 calibRegistry : string, optional 177 Path to registry with calibrations' metadata. 178 provided : list of string, optional 179 Keys provided by the mapper. 180 parentRegistry : Registry subclass, optional 181 Registry from a parent repository that may be used to look up 183 repositoryCfg : daf_persistence.RepositoryCfg or None, optional 184 The configuration information for the repository this mapper is 188 dafPersist.Mapper.__init__(self)
190 self.
log = lsstLog.Log.getLogger(
"CameraMapper")
195 self.
root = repositoryCfg.root
198 if isinstance(policy, pexPolicy.Policy):
199 policy = dafPersist.Policy(policy)
201 repoPolicy = repositoryCfg.policy
if repositoryCfg
else None 202 if repoPolicy
is not None:
203 policy.update(repoPolicy)
205 defaultPolicyFile = dafPersist.Policy.defaultPolicyFile(
"obs_base",
206 "MapperDictionary.paf",
208 dictPolicy = dafPersist.Policy(defaultPolicyFile)
209 policy.merge(dictPolicy)
213 if 'levels' in policy:
214 levelsPolicy = policy[
'levels']
215 for key
in levelsPolicy.names(
True):
216 self.
levels[key] = set(levelsPolicy.asArray(key))
219 if 'defaultSubLevels' in policy:
225 root = dafPersist.LogicalLocation(root).locString()
235 if calibRoot
is not None:
236 calibRoot = dafPersist.Storage.absolutePath(root, calibRoot)
237 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
240 calibRoot = policy.get(
'calibRoot',
None)
242 calibStorage = dafPersist.Storage.makeFromURI(uri=calibRoot,
244 if calibStorage
is None:
252 posixIfNoSql=(
not parentRegistry))
255 needCalibRegistry = policy.get(
'needCalibRegistry',
None)
256 if needCalibRegistry:
259 "calibRegistryPath", calibStorage,
263 "'needCalibRegistry' is true in Policy, but was unable to locate a repo at " +
264 "calibRoot ivar:%s or policy['calibRoot']:%s" %
265 (calibRoot, policy.get(
'calibRoot',
None)))
282 if 'defects' in policy:
283 self.
defectPath = os.path.join(repositoryDir, policy[
'defects'])
284 defectRegistryLocation = os.path.join(self.
defectPath,
"defectRegistry.sqlite3")
285 self.
defectRegistry = dafPersist.Registry.create(defectRegistryLocation)
293 raise ValueError(
'class variable packageName must not be None')
297 def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None):
298 """Initialize mappings 300 For each of the dataset types that we want to be able to read, there are 301 methods that can be created to support them: 302 * map_<dataset> : determine the path for dataset 303 * std_<dataset> : standardize the retrieved dataset 304 * bypass_<dataset> : retrieve the dataset (bypassing the usual retrieval machinery) 305 * query_<dataset> : query the registry 307 Besides the dataset types explicitly listed in the policy, we create 308 additional, derived datasets for additional conveniences, e.g., reading 309 the header of an image, retrieving only the size of a catalog. 313 policy : `lsst.daf.persistence.Policy` 314 Policy with per-camera defaults already merged 315 rootStorage : `Storage subclass instance` 316 Interface to persisted repository data. 317 calibRoot : `Storage subclass instance` 318 Interface to persisted calib repository data 319 provided : `list` of `str` 320 Keys provided by the mapper 323 imgMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
324 "obs_base",
"ImageMappingDictionary.paf",
"policy"))
325 expMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
326 "obs_base",
"ExposureMappingDictionary.paf",
"policy"))
327 calMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
328 "obs_base",
"CalibrationMappingDictionary.paf",
"policy"))
329 dsMappingPolicy = dafPersist.Policy(dafPersist.Policy.defaultPolicyFile(
330 "obs_base",
"DatasetMappingDictionary.paf",
"policy"))
334 (
"images", imgMappingPolicy, ImageMapping),
335 (
"exposures", expMappingPolicy, ExposureMapping),
336 (
"calibrations", calMappingPolicy, CalibrationMapping),
337 (
"datasets", dsMappingPolicy, DatasetMapping)
340 for name, defPolicy, cls
in mappingList:
342 datasets = policy[name]
345 defaultsPath = os.path.join(
getPackageDir(
"obs_base"),
"policy", name +
".yaml")
346 if os.path.exists(defaultsPath):
347 datasets.merge(dafPersist.Policy(defaultsPath))
350 setattr(self, name, mappings)
351 for datasetType
in datasets.names(
True):
352 subPolicy = datasets[datasetType]
353 subPolicy.merge(defPolicy)
355 if not hasattr(self,
"map_" + datasetType)
and 'composite' in subPolicy:
356 def compositeClosure(dataId, write=False, mapper=None, mapping=None,
357 subPolicy=subPolicy):
358 components = subPolicy.get(
'composite')
359 assembler = subPolicy[
'assembler']
if 'assembler' in subPolicy
else None 360 disassembler = subPolicy[
'disassembler']
if 'disassembler' in subPolicy
else None 361 python = subPolicy[
'python']
362 butlerComposite = dafPersist.ButlerComposite(assembler=assembler,
363 disassembler=disassembler,
367 for name, component
in components.items():
368 butlerComposite.add(id=name,
369 datasetType=component.get(
'datasetType'),
370 setter=component.get(
'setter',
None),
371 getter=component.get(
'getter',
None),
372 subset=component.get(
'subset',
False),
373 inputOnly=component.get(
'inputOnly',
False))
374 return butlerComposite
375 setattr(self,
"map_" + datasetType, compositeClosure)
379 if name ==
"calibrations":
381 provided=provided, dataRoot=rootStorage)
383 mapping = cls(datasetType, subPolicy, self.
registry, rootStorage, provided=provided)
384 self.
keyDict.update(mapping.keys())
385 mappings[datasetType] = mapping
386 self.
mappings[datasetType] = mapping
387 if not hasattr(self,
"map_" + datasetType):
388 def mapClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
389 return mapping.map(mapper, dataId, write)
390 setattr(self,
"map_" + datasetType, mapClosure)
391 if not hasattr(self,
"query_" + datasetType):
392 def queryClosure(format, dataId, mapping=mapping):
393 return mapping.lookup(format, dataId)
394 setattr(self,
"query_" + datasetType, queryClosure)
395 if hasattr(mapping,
"standardize")
and not hasattr(self,
"std_" + datasetType):
396 def stdClosure(item, dataId, mapper=weakref.proxy(self), mapping=mapping):
397 return mapping.standardize(mapper, item, dataId)
398 setattr(self,
"std_" + datasetType, stdClosure)
400 def setMethods(suffix, mapImpl=None, bypassImpl=None, queryImpl=None):
401 """Set convenience methods on CameraMapper""" 402 mapName =
"map_" + datasetType +
"_" + suffix
403 bypassName =
"bypass_" + datasetType +
"_" + suffix
404 queryName =
"query_" + datasetType +
"_" + suffix
405 if not hasattr(self, mapName):
406 setattr(self, mapName, mapImpl
or getattr(self,
"map_" + datasetType))
407 if not hasattr(self, bypassName):
408 if bypassImpl
is None and hasattr(self,
"bypass_" + datasetType):
409 bypassImpl = getattr(self,
"bypass_" + datasetType)
410 if bypassImpl
is not None:
411 setattr(self, bypassName, bypassImpl)
412 if not hasattr(self, queryName):
413 setattr(self, queryName, queryImpl
or getattr(self,
"query_" + datasetType))
416 setMethods(
"filename", bypassImpl=
lambda datasetType, pythonType, location, dataId:
417 [os.path.join(location.getStorage().root, p)
for p
in location.getLocations()])
419 if subPolicy[
"storage"] ==
"FitsStorage":
420 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
421 readMetadata(location.getLocationsWithRoot()[0]))
424 addName =
"add_" + datasetType
425 if not hasattr(self, addName):
428 if name ==
"exposures":
429 setMethods(
"wcs", bypassImpl=
lambda datasetType, pythonType, location, dataId:
430 afwGeom.makeSkyWcs(readMetadata(location.getLocationsWithRoot()[0])))
431 setMethods(
"calib", bypassImpl=
lambda datasetType, pythonType, location, dataId:
432 afwImage.Calib(readMetadata(location.getLocationsWithRoot()[0])))
433 setMethods(
"visitInfo",
434 bypassImpl=
lambda datasetType, pythonType, location, dataId:
435 afwImage.VisitInfo(readMetadata(location.getLocationsWithRoot()[0])))
437 bypassImpl=
lambda datasetType, pythonType, location, dataId:
438 afwImage.Filter(readMetadata(location.getLocationsWithRoot()[0])))
439 setMethods(
"detector",
440 mapImpl=
lambda dataId, write=
False:
441 dafPersist.ButlerLocation(
442 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
444 storageName=
"Internal",
445 locationList=
"ignored",
450 bypassImpl=
lambda datasetType, pythonType, location, dataId:
453 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
454 afwImage.bboxFromMetadata(
455 readMetadata(location.getLocationsWithRoot()[0], hdu=1)))
457 elif name ==
"images":
458 setMethods(
"bbox", bypassImpl=
lambda dsType, pyType, location, dataId:
459 afwImage.bboxFromMetadata(
460 readMetadata(location.getLocationsWithRoot()[0])))
462 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
463 setMethods(
"md", bypassImpl=
lambda datasetType, pythonType, location, dataId:
464 readMetadata(os.path.join(location.getStorage().root,
465 location.getLocations()[0]), hdu=1))
468 if subPolicy[
"storage"] ==
"FitsStorage":
469 def mapSubClosure(dataId, write=False, mapper=weakref.proxy(self), mapping=mapping):
470 subId = dataId.copy()
472 loc = mapping.map(mapper, subId, write)
473 bbox = dataId[
'bbox']
474 llcX = bbox.getMinX()
475 llcY = bbox.getMinY()
476 width = bbox.getWidth()
477 height = bbox.getHeight()
478 loc.additionalData.set(
'llcX', llcX)
479 loc.additionalData.set(
'llcY', llcY)
480 loc.additionalData.set(
'width', width)
481 loc.additionalData.set(
'height', height)
482 if 'imageOrigin' in dataId:
483 loc.additionalData.set(
'imageOrigin',
484 dataId[
'imageOrigin'])
487 def querySubClosure(key, format, dataId, mapping=mapping):
488 subId = dataId.copy()
490 return mapping.lookup(format, subId)
491 setMethods(
"sub", mapImpl=mapSubClosure, queryImpl=querySubClosure)
493 if subPolicy[
"storage"] ==
"FitsCatalogStorage":
495 setMethods(
"len", bypassImpl=
lambda datasetType, pythonType, location, dataId:
496 readMetadata(os.path.join(location.getStorage().root,
497 location.getLocations()[0]),
498 hdu=1).get(
"NAXIS2"))
501 if not datasetType.endswith(
"_schema")
and datasetType +
"_schema" not in datasets:
502 setMethods(
"schema", bypassImpl=
lambda datasetType, pythonType, location, dataId:
503 afwTable.Schema.readFits(os.path.join(location.getStorage().root,
504 location.getLocations()[0])))
506 def _computeCcdExposureId(self, dataId):
507 """Compute the 64-bit (long) identifier for a CCD exposure. 509 Subclasses must override 514 Data identifier with visit, ccd. 516 raise NotImplementedError()
518 def _computeCoaddExposureId(self, dataId, singleFilter):
519 """Compute the 64-bit (long) identifier for a coadd. 521 Subclasses must override 526 Data identifier with tract and patch. 527 singleFilter : `bool` 528 True means the desired ID is for a single-filter coadd, in which 529 case dataIdmust contain filter. 531 raise NotImplementedError()
533 def _search(self, path):
534 """Search for path in the associated repository's storage. 539 Path that describes an object in the repository associated with 541 Path may contain an HDU indicator, e.g. 'foo.fits[1]'. The 542 indicator will be stripped when searching and so will match 543 filenames without the HDU indicator, e.g. 'foo.fits'. The path 544 returned WILL contain the indicator though, e.g. ['foo.fits[1]']. 549 The path for this object in the repository. Will return None if the 550 object can't be found. If the input argument path contained an HDU 551 indicator, the returned path will also contain the HDU indicator. 556 """Rename any existing object with the given type and dataId. 558 The CameraMapper implementation saves objects in a sequence of e.g.: 564 All of the backups will be placed in the output repo, however, and will 565 not be removed if they are found elsewhere in the _parent chain. This 566 means that the same file will be stored twice if the previous version was 567 found in an input repo. 576 def firstElement(list):
577 """Get the first element in the list, or None if that can't be done. 579 return list[0]
if list
is not None and len(list)
else None 582 newLocation = self.map(datasetType, dataId, write=
True)
583 newPath = newLocation.getLocations()[0]
584 path = dafPersist.PosixStorage.search(self.
root, newPath, searchParents=
True)
585 path = firstElement(path)
587 while path
is not None:
589 oldPaths.append((n, path))
590 path = dafPersist.PosixStorage.search(self.
root,
"%s~%d" % (newPath, n), searchParents=
True)
591 path = firstElement(path)
592 for n, oldPath
in reversed(oldPaths):
593 self.
rootStorage.copyFile(oldPath,
"%s~%d" % (newPath, n))
596 """Return supported keys. 601 List of keys usable in a dataset identifier 606 """Return a dict of supported keys and their value types for a given dataset 607 type at a given level of the key hierarchy. 612 Dataset type or None for all dataset types. 613 level : `str` or None 614 Level or None for all levels or '' for the default level for the 620 Keys are strings usable in a dataset identifier, values are their 628 if datasetType
is None:
629 keyDict = copy.copy(self.
keyDict)
632 if level
is not None and level
in self.
levels:
633 keyDict = copy.copy(keyDict)
634 for l
in self.
levels[level]:
649 """Return the name of the camera that this CameraMapper is for.""" 651 className = className[className.find(
'.'):-1]
652 m = re.search(
r'(\w+)Mapper', className)
654 m = re.search(
r"class '[\w.]*?(\w+)'", className)
656 return name[:1].lower() + name[1:]
if name
else '' 660 """Return the name of the package containing this CameraMapper.""" 662 raise ValueError(
'class variable packageName must not be None')
667 """Return the base directory of this package""" 671 """Map a camera dataset.""" 673 raise RuntimeError(
"No camera dataset available.")
675 return dafPersist.ButlerLocation(
676 pythonType=
"lsst.afw.cameraGeom.CameraConfig",
678 storageName=
"ConfigStorage",
686 """Return the (preloaded) camera object. 689 raise RuntimeError(
"No camera dataset available.")
693 """Map defects dataset. 697 `lsst.daf.butler.ButlerLocation` 698 Minimal ButlerLocation containing just the locationList field 699 (just enough information that bypass_defects can use it). 702 if defectFitsPath
is None:
703 raise RuntimeError(
"No defects available for dataId=%s" % (dataId,))
705 return dafPersist.ButlerLocation(
None,
None,
None, defectFitsPath,
710 """Return a defect based on the butler location returned by map_defects 714 butlerLocation : `lsst.daf.persistence.ButlerLocation` 715 locationList = path to defects FITS file 717 Butler data ID; "ccd" must be set. 719 Note: the name "bypass_XXX" means the butler makes no attempt to convert the ButlerLocation 720 into an object, which is what we want for now, since that conversion is a bit tricky. 723 defectsFitsPath = butlerLocation.locationList[0]
724 with pyfits.open(defectsFitsPath)
as hduList:
725 for hdu
in hduList[1:]:
726 if hdu.header[
"name"] != detectorName:
730 for data
in hdu.data:
731 bbox = afwGeom.Box2I(
732 afwGeom.Point2I(int(data[
'x0']), int(data[
'y0'])),
733 afwGeom.Extent2I(int(data[
'width']), int(data[
'height'])),
735 defectList.append(afwImage.DefectBase(bbox))
738 raise RuntimeError(
"No defects for ccd %s in %s" % (detectorName, defectsFitsPath))
741 return dafPersist.ButlerLocation(
742 pythonType=
"lsst.obs.base.ExposureIdInfo",
744 storageName=
"Internal",
745 locationList=
"ignored",
752 """Hook to retrieve an lsst.obs.base.ExposureIdInfo for an exposure""" 753 expId = self.bypass_ccdExposureId(datasetType, pythonType, location, dataId)
754 expBits = self.bypass_ccdExposureId_bits(datasetType, pythonType, location, dataId)
758 """Disable standardization for bfKernel 760 bfKernel is a calibration product that is numpy array, 761 unlike other calibration products that are all images; 762 all calibration images are sent through _standardizeExposure 763 due to CalibrationMapping, but we don't want that to happen to bfKernel 768 """Standardize a raw dataset by converting it to an Exposure instead of an Image""" 770 trimmed=
False, setVisitInfo=
True)
773 """Map a sky policy.""" 774 return dafPersist.ButlerLocation(
"lsst.pex.policy.Policy",
"Policy",
775 "Internal",
None,
None, self,
779 """Standardize a sky policy by returning the one we use.""" 780 return self.skypolicy
788 def _getCcdKeyVal(self, dataId):
789 """Return CCD key and value used to look a defect in the defect registry 791 The default implementation simply returns ("ccd", full detector name) 795 def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True,
797 """Set up a registry (usually SQLite3), trying a number of possible 805 Description of registry (for log messages) 809 Policy that contains the registry name, used if path is None. 811 Key in policy for registry path. 812 storage : Storage subclass 813 Repository Storage to look in. 814 searchParents : bool, optional 815 True if the search for a registry should follow any Butler v1 817 posixIfNoSql : bool, optional 818 If an sqlite registry is not found, will create a posix registry if 823 lsst.daf.persistence.Registry 826 if path
is None and policyKey
in policy:
827 path = dafPersist.LogicalLocation(policy[policyKey]).locString()
828 if os.path.isabs(path):
829 raise RuntimeError(
"Policy should not indicate an absolute path for registry.")
830 if not storage.exists(path):
831 newPath = storage.instanceSearch(path)
833 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 835 self.
log.warn(
"Unable to locate registry at policy path (also looked in root): %s",
839 self.
log.warn(
"Unable to locate registry at policy path: %s", path)
847 if path
and (path.startswith(root)):
848 path = path[len(root +
'/'):]
849 except AttributeError:
855 def search(filename, description):
856 """Search for file in storage 861 Filename to search for 863 Description of file, for error message. 867 path : `str` or `None` 868 Path to file, or None 870 result = storage.instanceSearch(filename)
873 self.
log.debug(
"Unable to locate %s: %s", description, filename)
878 path = search(
"%s.pgsql" % name,
"%s in root" % description)
880 path = search(
"%s.sqlite3" % name,
"%s in root" % description)
882 path = search(os.path.join(
".",
"%s.sqlite3" % name),
"%s in current dir" % description)
885 if not storage.exists(path):
886 newPath = storage.instanceSearch(path)
887 newPath = newPath[0]
if newPath
is not None and len(newPath)
else None 888 if newPath
is not None:
890 localFileObj = storage.getLocalFile(path)
891 self.
log.info(
"Loading %s registry from %s", description, localFileObj.name)
892 registry = dafPersist.Registry.create(localFileObj.name)
894 elif not registry
and posixIfNoSql:
896 self.
log.info(
"Loading Posix %s registry from %s", description, storage.root)
897 registry = dafPersist.PosixRegistry(storage.root)
903 def _transformId(self, dataId):
904 """Generate a standard ID dict from a camera-specific ID dict. 906 Canonical keys include: 907 - amp: amplifier name 908 - ccd: CCD name (in LSST this is a combination of raft and sensor) 909 The default implementation returns a copy of its input. 914 Dataset identifier; this must not be modified 919 Transformed dataset identifier. 924 def _mapActualToPath(self, template, actualId):
925 """Convert a template path to an actual path, using the actual data 926 identifier. This implementation is usually sufficient but can be 927 overridden by the subclass. 944 return template % transformedId
945 except Exception
as e:
946 raise RuntimeError(
"Failed to format %r with data %r: %s" % (template, transformedId, e))
950 """Convert a CCD name to a form useful as a filename 952 The default implementation converts spaces to underscores. 954 return ccdName.replace(
" ",
"_")
956 def _extractDetectorName(self, dataId):
957 """Extract the detector (CCD) name from the dataset identifier. 959 The name in question is the detector name used by lsst.afw.cameraGeom. 971 raise NotImplementedError(
"No _extractDetectorName() function specified")
973 def _extractAmpId(self, dataId):
974 """Extract the amplifier identifier from a dataset identifier. 976 .. note:: Deprecated in 11_0 978 amplifier identifier has two parts: the detector name for the CCD 979 containing the amplifier and index of the amplifier in the detector. 993 return (trDataId[
"ccd"], int(trDataId[
'amp']))
995 def _setAmpDetector(self, item, dataId, trimmed=True):
996 """Set the detector object in an Exposure for an amplifier. 998 Defects are also added to the Exposure based on the detector object. 1002 item : `lsst.afw.image.Exposure` 1003 Exposure to set the detector in. 1007 Should detector be marked as trimmed? (ignored) 1010 return self.
_setCcdDetector(item=item, dataId=dataId, trimmed=trimmed)
1012 def _setCcdDetector(self, item, dataId, trimmed=True):
1013 """Set the detector object in an Exposure for a CCD. 1017 item : `lsst.afw.image.Exposure` 1018 Exposure to set the detector in. 1022 Should detector be marked as trimmed? (ignored) 1024 if item.getDetector()
is not None:
1028 detector = self.
camera[detectorName]
1029 item.setDetector(detector)
1031 def _setFilter(self, mapping, item, dataId):
1032 """Set the filter object in an Exposure. If the Exposure had a FILTER 1033 keyword, this was already processed during load. But if it didn't, 1034 use the filter from the registry. 1038 mapping : `lsst.obs.base.Mapping` 1039 Where to get the filter from. 1040 item : `lsst.afw.image.Exposure` 1041 Exposure to set the filter in. 1046 if not (isinstance(item, afwImage.ExposureU)
or isinstance(item, afwImage.ExposureI)
or 1047 isinstance(item, afwImage.ExposureF)
or isinstance(item, afwImage.ExposureD)):
1050 if item.getFilter().getId() != afwImage.Filter.UNKNOWN:
1053 actualId = mapping.need([
'filter'], dataId)
1054 filterName = actualId[
'filter']
1056 filterName = self.
filters[filterName]
1057 item.setFilter(afwImage.Filter(filterName))
1060 def _standardizeExposure(self, mapping, item, dataId, filter=True,
1061 trimmed=True, setVisitInfo=True):
1062 """Default standardization function for images. 1064 This sets the Detector from the camera geometry 1065 and optionally set the Fiter. In both cases this saves 1066 having to persist some data in each exposure (or image). 1070 mapping : `lsst.obs.base.Mapping` 1071 Where to get the values from. 1072 item : image-like object 1073 Can be any of lsst.afw.image.Exposure, 1074 lsst.afw.image.DecoratedImage, lsst.afw.image.Image 1075 or lsst.afw.image.MaskedImage 1080 Set filter? Ignored if item is already an exposure 1082 Should detector be marked as trimmed? 1083 setVisitInfo : `bool` 1084 Should Exposure have its VisitInfo filled out from the metadata? 1088 `lsst.afw.image.Exposure` 1089 The standardized Exposure. 1092 item =
exposureFromImage(item, dataId, mapper=self, logger=self.
log, setVisitInfo=setVisitInfo)
1093 except Exception
as e:
1094 self.
log.error(
"Could not turn item=%r into an exposure: %s" % (repr(item), e))
1097 if mapping.level.lower() ==
"amp":
1099 elif mapping.level.lower() ==
"ccd":
1107 def _defectLookup(self, dataId):
1108 """Find the defects for a given CCD. 1118 Path to the defects file or None if not available. 1123 raise RuntimeError(
"No registry for defect lookup")
1127 dataIdForLookup = {
'visit': dataId[
'visit']}
1129 rows = self.
registry.lookup((
'taiObs'), (
'raw_visit'), dataIdForLookup)
1132 assert len(rows) == 1
1138 (
"DATETIME(?)",
"DATETIME(validStart)",
"DATETIME(validEnd)"),
1140 if not rows
or len(rows) == 0:
1143 return os.path.join(self.
defectPath, rows[0][0])
1145 raise RuntimeError(
"Querying for defects (%s, %s) returns %d files: %s" %
1146 (ccdVal, taiObs, len(rows),
", ".join([_[0]
for _
in rows])))
1148 def _makeCamera(self, policy, repositoryDir):
1149 """Make a camera (instance of lsst.afw.cameraGeom.Camera) describing the camera geometry 1151 Also set self.cameraDataLocation, if relevant (else it can be left None). 1153 This implementation assumes that policy contains an entry "camera" that points to the 1154 subdirectory in this package of camera data; specifically, that subdirectory must contain: 1155 - a file named `camera.py` that contains persisted camera config 1156 - ampInfo table FITS files, as required by lsst.afw.cameraGeom.makeCameraFromPath 1160 policy : `lsst.daf.persistence.Policy` or `pexPolicy.Policy` 1161 Policy with per-camera defaults already merged 1162 (PexPolicy only for backward compatibility). 1163 repositoryDir : `str` 1164 Policy repository for the subclassing module (obtained with 1165 getRepositoryPath() on the per-camera default dictionary). 1167 if isinstance(policy, pexPolicy.Policy):
1168 policy = dafPersist.Policy(pexPolicy=policy)
1169 if 'camera' not in policy:
1170 raise RuntimeError(
"Cannot find 'camera' in policy; cannot construct a camera")
1171 cameraDataSubdir = policy[
'camera']
1173 os.path.join(repositoryDir, cameraDataSubdir,
"camera.py"))
1174 cameraConfig = afwCameraGeom.CameraConfig()
1177 return afwCameraGeom.makeCameraFromPath(
1178 cameraConfig=cameraConfig,
1179 ampInfoPath=ampInfoPath,
1185 """Get the registry used by this mapper. 1190 The registry used by this mapper for this mapper's repository. 1195 """Stuff image compression settings into a daf.base.PropertySet 1197 This goes into the ButlerLocation's "additionalData", which gets 1198 passed into the boost::persistence framework. 1203 Type of dataset for which to get the image compression settings. 1209 additionalData : `lsst.daf.base.PropertySet` 1210 Image compression settings. 1212 mapping = self.
mappings[datasetType]
1213 recipeName = mapping.recipe
1214 storageType = mapping.storage
1216 return dafBase.PropertySet()
1218 raise RuntimeError(
"Unrecognized write recipe for datasetType %s (storage type %s): %s" %
1219 (datasetType, storageType, recipeName))
1220 recipe = self.
_writeRecipes[storageType][recipeName].deepCopy()
1221 seed = hash(tuple(dataId.items())) % 2**31
1222 for plane
in (
"image",
"mask",
"variance"):
1223 if recipe.exists(plane +
".scaling.seed")
and recipe.get(plane +
".scaling.seed") == 0:
1224 recipe.set(plane +
".scaling.seed", seed)
1227 def _initWriteRecipes(self):
1228 """Read the recipes for writing files 1230 These recipes are currently used for configuring FITS compression, 1231 but they could have wider uses for configuring different flavors 1232 of the storage types. A recipe is referred to by a symbolic name, 1233 which has associated settings. These settings are stored as a 1234 `PropertySet` so they can easily be passed down to the 1235 boost::persistence framework as the "additionalData" parameter. 1237 The list of recipes is written in YAML. A default recipe and 1238 some other convenient recipes are in obs_base/policy/writeRecipes.yaml 1239 and these may be overridden or supplemented by the individual obs_* 1240 packages' own policy/writeRecipes.yaml files. 1242 Recipes are grouped by the storage type. Currently, only the 1243 ``FitsStorage`` storage type uses recipes, which uses it to 1244 configure FITS image compression. 1246 Each ``FitsStorage`` recipe for FITS compression should define 1247 "image", "mask" and "variance" entries, each of which may contain 1248 "compression" and "scaling" entries. Defaults will be provided for 1249 any missing elements under "compression" and "scaling". 1251 The allowed entries under "compression" are: 1253 * algorithm (string): compression algorithm to use 1254 * rows (int): number of rows per tile (0 = entire dimension) 1255 * columns (int): number of columns per tile (0 = entire dimension) 1256 * quantizeLevel (float): cfitsio quantization level 1258 The allowed entries under "scaling" are: 1260 * algorithm (string): scaling algorithm to use 1261 * bitpix (int): bits per pixel (0,8,16,32,64,-32,-64) 1262 * fuzz (bool): fuzz the values when quantising floating-point values? 
1263 * seed (long): seed for random number generator when fuzzing 1264 * maskPlanes (list of string): mask planes to ignore when doing statistics 1265 * quantizeLevel: divisor of the standard deviation for STDEV_* scaling 1266 * quantizePad: number of stdev to allow on the low side (for STDEV_POSITIVE/NEGATIVE) 1267 * bscale: manually specified BSCALE (for MANUAL scaling) 1268 * bzero: manually specified BSCALE (for MANUAL scaling) 1270 A very simple example YAML recipe: 1276 algorithm: GZIP_SHUFFLE 1280 recipesFile = os.path.join(
getPackageDir(
"obs_base"),
"policy",
"writeRecipes.yaml")
1281 recipes = dafPersist.Policy(recipesFile)
1282 supplementsFile = os.path.join(self.
getPackageDir(),
"policy",
"writeRecipes.yaml")
1283 validationMenu = {
'FitsStorage': validateRecipeFitsStorage, }
1284 if os.path.exists(supplementsFile)
and supplementsFile != recipesFile:
1285 supplements = dafPersist.Policy(supplementsFile)
1287 for entry
in validationMenu:
1288 intersection = set(recipes[entry].names()).intersection(set(supplements.names()))
1290 raise RuntimeError(
"Recipes provided in %s section %s may not override those in %s: %s" %
1291 (supplementsFile, entry, recipesFile, intersection))
1292 recipes.update(supplements)
1295 for storageType
in recipes.names(
True):
1296 if "default" not in recipes[storageType]:
1297 raise RuntimeError(
"No 'default' recipe defined for storage type %s in %s" %
1298 (storageType, recipesFile))
1299 self.
_writeRecipes[storageType] = validationMenu[storageType](recipes[storageType])
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True):
    """Generate an Exposure from an image-like object

    If the image is a DecoratedImage then also set its WCS and metadata
    (Image and MaskedImage are missing the necessary metadata
    and Exposure already has those set)

    Parameters
    ----------
    image : Image-like object
        Can be one of lsst.afw.image.DecoratedImage, Image, MaskedImage or
        Exposure.
    dataId : `dict`, optional
        Dataset identifier; used only when a mapper is supplied to compute
        the exposure ID for the VisitInfo.
    mapper : `CameraMapper`, optional
        Mapper used to compute the exposure ID and build the VisitInfo.
    logger : `lsst.log.Log`, optional
        Logger for warnings; a default "CameraMapper" logger is used if
        omitted.
    setVisitInfo : `bool`, optional
        If True, attach a VisitInfo when one can be constructed.

    Returns
    -------
    `lsst.afw.image.Exposure`
        Exposure containing input image.
    """
    metadata = None
    if isinstance(image, afwImage.MaskedImage):
        exposure = afwImage.makeExposure(image)
    elif isinstance(image, afwImage.DecoratedImage):
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image.getImage()))
        metadata = image.getMetadata()
        try:
            wcs = afwGeom.makeSkyWcs(metadata, strip=True)
            exposure.setWcs(wcs)
        except pexExcept.TypeError as e:
            # raised on failure to create a wcs (and possibly others);
            # a missing WCS is non-fatal, so warn and continue.
            if logger is None:
                logger = lsstLog.Log.getLogger("CameraMapper")
            logger.warn("wcs set to None; insufficient information found in metadata to create a valid wcs: "
                        "%s", e.args[0])
        exposure.setMetadata(metadata)
    elif isinstance(image, afwImage.Exposure):
        # Already an Exposure; just grab its metadata for the VisitInfo step.
        exposure = image
        metadata = exposure.getMetadata()
    else:  # Image
        exposure = afwImage.makeExposure(afwImage.makeMaskedImage(image))
    #
    # set VisitInfo if we can
    #
    if setVisitInfo and exposure.getInfo().getVisitInfo() is None:
        if metadata is not None:
            if mapper is None:
                if not logger:
                    logger = lsstLog.Log.getLogger("CameraMapper")
                logger.warn("I can only set the VisitInfo if you provide a mapper")
            else:
                exposureId = mapper._computeCcdExposureId(dataId)
                visitInfo = mapper.makeRawVisitInfo(md=metadata, exposureId=exposureId)
                exposure.getInfo().setVisitInfo(visitInfo)

    return exposure
def validateRecipeFitsStorage(recipes):
    """Validate recipes for FitsStorage

    The recipes are supplemented with default values where appropriate.

    TODO: replace this custom validation code with Cerberus (DM-11846)

    Parameters
    ----------
    recipes : `lsst.daf.persistence.Policy`
        FitsStorage recipes to validate.

    Returns
    -------
    validated : `dict` of `lsst.daf.base.PropertySet`
        Validated FitsStorage recipes, keyed by recipe name.

    Raises
    ------
    RuntimeError
        If validation fails.
    """
    validated = {}

    # Defaults applied for any element missing from a recipe.
    compressionSchema = {
        "algorithm": "NONE",
        "rows": 1,
        "columns": 0,
        "quantizeLevel": 0.0,
    }
    scalingSchema = {
        "algorithm": "NONE",
        "bitpix": 0,
        "maskPlanes": ["NO_DATA"],
        "seed": 0,
        "quantizeLevel": 4.0,
        "quantizePad": 5.0,
        "fuzz": True,
        "bscale": 1.0,
        "bzero": 0.0,
    }

    def checkUnrecognized(entry, allowed, description):
        """Check to see if the entry contains unrecognised keywords"""
        unrecognized = set(entry.keys()) - set(allowed)
        if unrecognized:
            raise RuntimeError(
                "Unrecognized entries when parsing image compression recipe %s: %s" %
                (description, unrecognized))

    for name in recipes.names(True):
        checkUnrecognized(recipes[name], ["image", "mask", "variance"], name)
        rr = dafBase.PropertySet()
        validated[name] = rr
        for plane in ("image", "mask", "variance"):
            checkUnrecognized(recipes[name][plane], ["compression", "scaling"],
                              name + "->" + plane)

            for settings, schema in (("compression", compressionSchema),
                                     ("scaling", scalingSchema)):
                prefix = plane + "." + settings
                if settings not in recipes[name][plane]:
                    # Section absent entirely: fill with schema defaults.
                    for key in schema:
                        rr.set(prefix + "." + key, schema[key])
                    continue
                entry = recipes[name][plane][settings]
                checkUnrecognized(entry, schema.keys(), name + "->" + plane + "->" + settings)
                for key in schema:
                    # Coerce supplied values to the schema's type so the
                    # PropertySet has consistent entry types.
                    value = type(schema[key])(entry[key]) if key in entry else schema[key]
                    rr.set(prefix + "." + key, value)
    return validated
def _makeCamera(self, policy, repositoryDir)
def map_expIdInfo(self, dataId, write=False)
def _setAmpDetector(self, item, dataId, trimmed=True)
def validateRecipeFitsStorage(recipes)
def _standardizeExposure(self, mapping, item, dataId, filter=True, trimmed=True, setVisitInfo=True)
def _extractDetectorName(self, dataId)
def _setFilter(self, mapping, item, dataId)
def _setCcdDetector(self, item, dataId, trimmed=True)
def bypass_defects(self, datasetType, pythonType, butlerLocation, dataId)
def std_bfKernel(self, item, dataId)
def getKeys(self, datasetType, level)
def getImageCompressionSettings(self, datasetType, dataId)
def map_defects(self, dataId, write=False)
def map_camera(self, dataId, write=False)
def std_raw(self, item, dataId)
def backup(self, datasetType, dataId)
def _setupRegistry(self, name, description, path, policy, policyKey, storage, searchParents=True, posixIfNoSql=True)
def map_skypolicy(self, dataId)
def std_skypolicy(self, item, dataId)
def _defectLookup(self, dataId)
def bypass_camera(self, datasetType, pythonType, butlerLocation, dataId)
def _initMappings(self, policy, rootStorage=None, calibStorage=None, provided=None)
def getDefaultSubLevel(self, level)
def _transformId(self, dataId)
def getDefaultLevel(self)
def __init__(self, policy, repositoryDir, root=None, registry=None, calibRoot=None, calibRegistry=None, provided=None, parentRegistry=None, repositoryCfg=None)
def bypass_expIdInfo(self, datasetType, pythonType, location, dataId)
def exposureFromImage(image, dataId=None, mapper=None, logger=None, setVisitInfo=True)
def _initWriteRecipes(self)
def getShortCcdName(ccdName)
def _getCcdKeyVal(self, dataId)
Utility functions.