# Module imports, reassembled from the mangled paste (fused original line
# numbers removed, statements rejoined).  `os` and `re` are added because the
# module body uses os.path.isabs and re.findall.
import os
import re

from builtins import zip
from builtins import object

from collections import OrderedDict

from lsst.daf.base import PropertySet
from lsst.daf.persistence import ButlerLocation, NoResults
from lsst.daf.persistence.policy import Policy
import lsst.pex.policy as pexPolicy
33 __all__ = [
"Mapping",
"ImageMapping",
"ExposureMapping",
"CalibrationMapping",
"DatasetMapping"]
38 """Mapping is a base class for all mappings. Mappings are used by 39 the Mapper to map (determine a path to some data given some 40 identifiers) and standardize (convert data into some standard 41 format or type) data, and to query the associated registry to see 42 what data is available. 44 Subclasses must specify self.storage or else override self.map(). 46 Public methods: lookup, have, need, getKeys, map 48 Mappings are specified mainly by policy. A Mapping policy should 51 template (string): a Python string providing the filename for that 52 particular dataset type based on some data identifiers. In the 53 case of redundancy in the path (e.g., file uniquely specified by 54 the exposure number, but filter in the path), the 55 redundant/dependent identifiers can be looked up in the registry. 57 python (string): the Python type for the retrieved data (e.g. 58 lsst.afw.image.ExposureF) 60 persistable (string): the Persistable registration for the on-disk data 63 storage (string, optional): Storage type for this dataset type (e.g. 66 level (string, optional): the level in the camera hierarchy at which the 67 data is stored (Amp, Ccd or skyTile), if relevant 69 tables (string, optional): a whitespace-delimited list of tables in the 70 registry that can be NATURAL JOIN-ed to look up additional 76 Butler dataset type to be mapped. 77 policy : `daf_persistence.Policy` or `pexPolicy.Policy` 78 Mapping Policy. (pexPolicy only for backward compatibility) 79 registry : `lsst.obs.base.Registry` 80 Registry for metadata lookups. 81 rootStorage : Storage subclass instance 82 Interface to persisted repository data. 83 provided : `list` of `str` 84 Keys provided by the mapper. 87 def __init__(self, datasetType, policy, registry, rootStorage, provided=None):
90 raise RuntimeError(
"No policy provided for mapping")
92 if isinstance(policy, pexPolicy.Policy):
93 policy = Policy(policy)
107 (k, _formatMap(v, k, datasetType))
109 re.findall(
r'\%\((\w+)\).*?([diouxXeEfFgGcrs])', self.
template)
113 if provided
is not None:
120 if 'level' in policy:
122 if 'tables' in policy:
128 self.
obsTimeName = policy[
'obsTimeName']
if 'obsTimeName' in policy
else None 129 self.
recipe = policy[
'recipe']
if 'recipe' in policy
else 'default' 136 raise RuntimeError(
"Template is not defined for the {} dataset type, ".format(self.
datasetType) +
137 "it must be set before it can be used.")
140 """Return the dict of keys and value types required for this mapping.""" 143 def map(self, mapper, dataId, write=False):
144 """Standard implementation of map function. 148 mapper: `lsst.daf.persistence.Mapper` 155 lsst.daf.persistence.ButlerLocation 156 Location of object that was mapped. 159 usedDataId = {key: actualId[key]
for key
in self.
keyDict.
keys()}
160 path = mapper._mapActualToPath(self.
template, actualId)
161 if os.path.isabs(path):
162 raise RuntimeError(
"Mapped path should not be absolute.")
169 for ext
in (
None,
'.gz',
'.fz'):
170 if ext
and path.endswith(ext):
172 extPath = path + ext
if ext
else path
177 assert path,
"Fully-qualified filename is empty." 180 if hasattr(mapper, addFunc):
181 addFunc = getattr(mapper, addFunc)
182 additionalData = addFunc(self.
datasetType, actualId)
183 assert isinstance(additionalData, PropertySet), \
184 "Bad type for returned data: %s" (type(additionalData),)
186 additionalData =
None 189 locationList=path, dataId=actualId.copy(), mapper=mapper,
191 additionalData=additionalData)
194 """Look up properties for in a metadata registry given a partial 199 properties : `list` of `str` 207 Values of properties. 210 raise RuntimeError(
"No registry for lookup")
212 skyMapKeys = (
"tract",
"patch")
224 substitutions = OrderedDict()
226 properties = list(properties)
230 substitutions[p] = dataId[p]
234 "Cannot look up skymap key '%s'; it must be explicitly included in the data ID" % p
237 substitutions[p] = index
245 if p
not in (
'filter',
'expTime',
'taiObs'):
248 if fastPath
and 'visit' in dataId
and "raw" in self.
tables:
249 lookupDataId = {
'visit': dataId[
'visit']}
252 if dataId
is not None:
253 for k, v
in dataId.items():
260 where.append((k,
'?'))
262 lookupDataId = {k[0]: v
for k, v
in zip(where, values)}
271 result = [tuple(v
if k
in removed
else item[v]
for k, v
in substitutions.items())
def have(self, properties, dataId):
    """Returns whether the provided data identifier has all
    the properties in the provided list.

    Parameters
    ----------
    properties : `list of `str`
        Properties required.
    dataId : `dict`
        Dataset identifier.

    Returns
    -------
    bool
        True if all properties are present.
    """
    # Method of Mapping; the two return statements were lost in the mangled
    # paste and are restored per the documented contract above.
    for prop in properties:
        if prop not in dataId:
            return False
    return True
def need(self, properties, dataId):
    """Ensures all properties in the provided list are present in
    the data identifier, looking them up as needed.  This is only
    possible for the case where the data identifies a single
    exposure.

    Parameters
    ----------
    properties : `list` of `str`
        Properties required.
    dataId : `dict`
        Partial dataset identifier

    Returns
    -------
    `dict`
        Copy of dataset identifier with enhanced values.
    """
    # Method of Mapping.  Work on a copy so the caller's dataId is untouched.
    newId = dataId.copy()
    newProps = []  # properties not yet present (init lost in mangled paste)
    for prop in properties:
        if prop not in newId:
            newProps.append(prop)
    if len(newProps) == 0:
        return newId

    lookups = self.lookup(newProps, newId)
    if len(lookups) != 1:
        # NOTE(review): the trailing NoResults arguments were truncated in
        # the paste; datasetType/dataId reconstructed — confirm upstream.
        raise NoResults("No unique lookup for %s from %s: %d matches" %
                        (newProps, newId, len(lookups)),
                        self.datasetType, dataId)
    for i, prop in enumerate(newProps):
        newId[prop] = lookups[0][i]
    return newId
332 def _formatMap(ch, k, datasetType):
333 """Convert a format character into a Python type.""" 341 raise RuntimeError(
"Unexpected format specifier %s" 342 " for field %s in template for dataset %s" %
343 (ch, k, datasetType))
347 """ImageMapping is a Mapping subclass for non-camera images. 352 Butler dataset type to be mapped. 353 policy : `daf_persistence.Policy` `pexPolicy.Policy` 354 Mapping Policy. (pexPolicy only for backward compatibility) 355 registry : `lsst.obs.base.Registry` 356 Registry for metadata lookups 358 Path of root directory 361 def __init__(self, datasetType, policy, registry, root, **kwargs):
362 if isinstance(policy, pexPolicy.Policy):
363 policy = Policy(policy)
364 Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
365 self.
columns = policy.asArray(
'columns')
if 'columns' in policy
class ExposureMapping(Mapping):
    """ExposureMapping is a Mapping subclass for normal exposures.

    Parameters
    ----------
    datasetType : `str`
        Butler dataset type to be mapped.
    policy : `daf_persistence.Policy` or `pexPolicy.Policy`
        Mapping Policy (pexPolicy only for backward compatibility)
    registry : `lsst.obs.base.Registry`
        Registry for metadata lookups
    root : `str`
        Path of root directory
    """

    def __init__(self, datasetType, policy, registry, root, **kwargs):
        if isinstance(policy, pexPolicy.Policy):
            policy = Policy(policy)
        Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
        # Optional registry columns used for lookups of this dataset type.
        self.columns = policy.asArray('columns') if 'columns' in policy else None

    def standardize(self, mapper, item, dataId):
        # Delegate exposure standardization to the mapper.
        return mapper._standardizeExposure(self, item, dataId)
394 """CalibrationMapping is a Mapping subclass for calibration-type products. 396 The difference is that data properties in the query or template 397 can be looked up using a reference Mapping in addition to this one. 399 CalibrationMapping Policies can contain the following: 401 reference (string, optional) 402 a list of tables for finding missing dataset 403 identifier components (including the observation time, if a validity range 404 is required) in the exposure registry; note that the "tables" entry refers 405 to the calibration registry 407 refCols (string, optional) 408 a list of dataset properties required from the 409 reference tables for lookups in the calibration registry 412 true if the calibration dataset has a validity range 413 specified by a column in the tables of the reference dataset in the 414 exposure registry) and two columns in the tables of this calibration 415 dataset in the calibration registry) 417 obsTimeName (string, optional) 418 the name of the column in the reference 419 dataset tables containing the observation time (default "taiObs") 421 validStartName (string, optional) 422 the name of the column in the 423 calibration dataset tables containing the start of the validity range 424 (default "validStart") 426 validEndName (string, optional) 427 the name of the column in the 428 calibration dataset tables containing the end of the validity range 434 Butler dataset type to be mapped. 435 policy : `daf_persistence.Policy` or `pexPolicy.Policy` 436 Mapping Policy (pexPolicy only for backward compatibility) 437 registry : `lsst.obs.base.Registry` 438 Registry for metadata lookups 439 calibRegistry : `lsst.obs.base.Registry` 440 Registry for calibration metadata lookups. 442 Path of calibration root directory. 444 Path of data root directory; used for outputs only. 447 def __init__(self, datasetType, policy, registry, calibRegistry, calibRoot, dataRoot=None, **kwargs):
448 if isinstance(policy, pexPolicy.Policy):
449 policy = Policy(policy)
450 Mapping.__init__(self, datasetType, policy, calibRegistry, calibRoot, **kwargs)
451 self.
reference = policy.asArray(
"reference")
if "reference" in policy
else None 452 self.
refCols = policy.asArray(
"refCols")
if "refCols" in policy
else None 455 if "validRange" in policy
and policy[
"validRange"]:
456 self.
range = (
"?", policy[
"validStartName"], policy[
"validEndName"])
457 if "columns" in policy:
459 if "filter" in policy:
462 if "metadataKey" in policy:
465 def map(self, mapper, dataId, write=False):
466 location = Mapping.map(self, mapper, dataId, write=write)
473 """Look up properties for in a metadata registry given a partial 478 properties : `list` of `str` 479 Properties to look up. 486 Values of properties. 492 newId = dataId.copy()
496 for k, v
in dataId.items():
505 for k
in dataId.keys():
508 columns = set(properties)
512 return Mapping.lookup(self, properties, newId)
514 lookupDataId = dict(zip(where, values))
516 if len(lookups) != 1:
517 raise RuntimeError(
"No unique lookup for %s from %s: %d matches" %
518 (columns, dataId, len(lookups)))
519 if columns == set(properties):
522 for i, prop
in enumerate(columns):
523 newId[prop] = lookups[0][i]
524 return Mapping.lookup(self, properties, newId)
527 return mapper._standardizeExposure(self, item, dataId, filter=self.
setFilter)
531 """DatasetMapping is a Mapping subclass for non-Exposure datasets that can 532 be retrieved by the standard daf_persistence mechanism. 534 The differences are that the Storage type must be specified and no 535 Exposure standardization is performed. 537 The "storage" entry in the Policy is mandatory; the "tables" entry is 538 optional; no "level" entry is allowed. 543 Butler dataset type to be mapped. 544 policy : `daf_persistence.Policy` `pexPolicy.Policy` 545 Mapping Policy. (pexPolicy only for backward compatibility) 546 registry : `lsst.obs.base.Registry` 547 Registry for metadata lookups 549 Path of root directory 552 def __init__(self, datasetType, policy, registry, root, **kwargs):
553 if isinstance(policy, pexPolicy.Policy):
554 policy = Policy(policy)
555 Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
def __init__(self, datasetType, policy, registry, root, kwargs)
def __init__(self, datasetType, policy, registry, calibRegistry, calibRoot, dataRoot=None, kwargs)
def __init__(self, datasetType, policy, registry, root, kwargs)
def standardize(self, mapper, item, dataId)
def map(self, mapper, dataId, write=False)
def have(self, properties, dataId)
def standardize(self, mapper, item, dataId)
def need(self, properties, dataId)
def lookup(self, properties, dataId)
def __init__(self, datasetType, policy, registry, root, kwargs)
def lookup(self, properties, dataId)
def __init__(self, datasetType, policy, registry, rootStorage, provided=None)
def map(self, mapper, dataId, write=False)