# Standard library imports.
# NOTE(review): `os` and `re` are used below (os.path.isabs, re.findall) but
# their import lines fell in a gap of the garbled extraction; restored here.
import os
import re
from collections import OrderedDict

# LSST middleware imports (project-local packages).
from lsst.daf.base import PropertySet
from lsst.daf.persistence import ButlerLocation, NoResults
from lsst.daf.persistence.policy import Policy
import lsst.pex.policy as pexPolicy
# Explicit public API of this module; the classes are defined below.
# (Fixed: a fused original-source line number `31 ` had been prepended to the
# statement, making it invalid Python.)
__all__ = ["Mapping", "ImageMapping", "ExposureMapping",
           "CalibrationMapping", "DatasetMapping"]
# NOTE(review): everything in this region is extraction-garbled.  The original
# file's line numbers are fused into the text and many interior lines are
# missing (the embedded numbering jumps, e.g. 91 -> 105, 160 -> 167).  The
# `class Mapping` header line itself is absent.  Code is kept byte-identical
# below; comments only orient the reader.  Recover the true text from version
# control before attempting any code-level fix here.
36 """Mapping is a base class for all mappings. Mappings are used by 37 the Mapper to map (determine a path to some data given some 38 identifiers) and standardize (convert data into some standard 39 format or type) data, and to query the associated registry to see 40 what data is available. 42 Subclasses must specify self.storage or else override self.map(). 44 Public methods: lookup, have, need, getKeys, map 46 Mappings are specified mainly by policy. A Mapping policy should 49 template (string): a Python string providing the filename for that 50 particular dataset type based on some data identifiers. In the 51 case of redundancy in the path (e.g., file uniquely specified by 52 the exposure number, but filter in the path), the 53 redundant/dependent identifiers can be looked up in the registry. 55 python (string): the Python type for the retrieved data (e.g. 56 lsst.afw.image.ExposureF) 58 persistable (string): the Persistable registration for the on-disk data 61 storage (string, optional): Storage type for this dataset type (e.g. 64 level (string, optional): the level in the camera hierarchy at which the 65 data is stored (Amp, Ccd or skyTile), if relevant 67 tables (string, optional): a whitespace-delimited list of tables in the 68 registry that can be NATURAL JOIN-ed to look up additional 74 Butler dataset type to be mapped. 75 policy : `daf_persistence.Policy` or `pexPolicy.Policy` 76 Mapping Policy. (pexPolicy only for backward compatibility) 77 registry : `lsst.obs.base.Registry` 78 Registry for metadata lookups. 79 rootStorage : Storage subclass instance 80 Interface to persisted repository data. 81 provided : `list` of `str` 82 Keys provided by the mapper. 85 def __init__(self, datasetType, policy, registry, rootStorage, provided=None):
# Fragment of Mapping.__init__: rejects a missing policy, then converts a
# legacy pexPolicy.Policy to the newer daf_persistence Policy.
88 raise RuntimeError(
"No policy provided for mapping")
90 if isinstance(policy, pexPolicy.Policy):
91 policy = Policy(policy)
# Fragment: builds the key dict by scanning self.template for %-style
# substitution fields (name + format character) via re.findall.
105 (k, _formatMap(v, k, datasetType))
107 re.findall(
r'\%\((\w+)\).*?([diouxXeEfFgGcrs])', self.
template)
111 if provided
is not None:
# Fragment: optional policy entries — 'level', 'tables', 'obsTimeName'
# (defaults to None) and 'recipe' (defaults to 'default'); then an error
# raised when the template is undefined.
118 if 'level' in policy:
120 if 'tables' in policy:
126 self.
obsTimeName = policy[
'obsTimeName']
if 'obsTimeName' in policy
else None 127 self.
recipe = policy[
'recipe']
if 'recipe' in policy
else 'default' 134 raise RuntimeError(
"Template is not defined for the {} dataset type, ".format(self.
datasetType) +
135 "it must be set before it can be used.")
# Fragments of getKeys() (docstring only survives) and map(): selects the
# data-ID keys named in self.keyDict, renders a relative path from
# self.template, and rejects absolute results.
138 """Return the dict of keys and value types required for this mapping.""" 141 def map(self, mapper, dataId, write=False):
142 """Standard implementation of map function. 146 mapper: `lsst.daf.persistence.Mapper` 153 lsst.daf.persistence.ButlerLocation 154 Location of object that was mapped. 157 usedDataId = {key: actualId[key]
for key
in self.
keyDict.
keys()}
158 path = mapper._mapActualToPath(self.
template, actualId)
159 if os.path.isabs(path):
160 raise RuntimeError(
"Mapped path should not be absolute.")
# Fragment: probes compressed variants of the path (.gz / .fz) — the loop
# body between these lines is missing from the extraction.
167 for ext
in (
None,
'.gz',
'.fz'):
168 if ext
and path.endswith(ext):
170 extPath = path + ext
if ext
else path
# Fragment: optionally calls a mapper-provided add-function for extra
# PropertySet data, then constructs the ButlerLocation (constructor call
# itself is in a missing line).
175 assert path,
"Fully-qualified filename is empty." 178 if hasattr(mapper, addFunc):
179 addFunc = getattr(mapper, addFunc)
180 additionalData = addFunc(self.
datasetType, actualId)
181 assert isinstance(additionalData, PropertySet), \
# NOTE(review): the line below looks like it is missing a '%' before the
# tuple (the format string is never applied, so this would raise TypeError
# when the assert fails) — confirm against the original source.
182 "Bad type for returned data: %s" (type(additionalData),)
184 additionalData =
None 187 locationList=path, dataId=actualId.copy(), mapper=mapper,
189 additionalData=additionalData)
# Fragment of Mapping.lookup(): requires a registry, special-cases skymap
# keys (tract/patch), and assembles a registry query (WHERE clauses built
# from the data ID); most of the query-execution code is missing.
192 """Look up properties for in a metadata registry given a partial 197 properties : `list` of `str` 205 Values of properties. 208 raise RuntimeError(
"No registry for lookup")
210 skyMapKeys = (
"tract",
"patch")
222 substitutions = OrderedDict()
224 properties = list(properties)
228 substitutions[p] = dataId[p]
232 "Cannot look up skymap key '%s'; it must be explicitly included in the data ID" % p
235 substitutions[p] = index
243 if p
not in (
'filter',
'expTime',
'taiObs'):
246 if fastPath
and 'visit' in dataId
and "raw" in self.
tables:
247 lookupDataId = {
'visit': dataId[
'visit']}
250 if dataId
is not None:
251 for k, v
in dataId.items():
258 where.append((k,
'?'))
260 lookupDataId = {k[0]: v
for k, v
in zip(where, values)}
269 result = [tuple(v
if k
in removed
else item[v]
for k, v
in substitutions.items())
# Fragment of Mapping.have(): membership test of each property in the data
# ID; the return statements fell in missing lines.
273 def have(self, properties, dataId):
274 """Returns whether the provided data identifier has all 275 the properties in the provided list. 279 properties : `list of `str` 287 True if all properties are present. 289 for prop
in properties:
290 if prop
not in dataId:
# Fragment of Mapping.need(): copies the data ID, collects missing
# properties, resolves them via self.lookup(), and requires exactly one
# match (otherwise raises NoResults).
294 def need(self, properties, dataId):
295 """Ensures all properties in the provided list are present in 296 the data identifier, looking them up as needed. This is only 297 possible for the case where the data identifies a single 302 properties : `list` of `str` 305 Partial dataset identifier 310 Copy of dataset identifier with enhanced values. 312 newId = dataId.copy()
314 for prop
in properties:
315 if prop
not in newId:
316 newProps.append(prop)
317 if len(newProps) == 0:
320 lookups = self.
lookup(newProps, newId)
321 if len(lookups) != 1:
322 raise NoResults(
"No unique lookup for %s from %s: %d matches" %
323 (newProps, newId, len(lookups)),
325 for i, prop
in enumerate(newProps):
326 newId[prop] = lookups[0][i]
# Module-level helper, garbled: the mapping table of its body (original lines
# 332-338) is missing; only the fall-through error remains.  Note the residue
# "340" fused between the two adjacent string literals on the message line.
330 def _formatMap(ch, k, datasetType):
331 """Convert a format character into a Python type.""" 339 raise RuntimeError(
"Unexpected format specifier %s" 340 " for field %s in template for dataset %s" %
341 (ch, k, datasetType))
# ImageMapping, garbled: the `class` header is missing and the docstring is
# fused with the __init__ signature.  Visible behavior: converts a legacy
# pexPolicy.Policy, forwards to Mapping.__init__, and records optional
# 'columns' from the policy (ternary continues onto the next region).
345 """ImageMapping is a Mapping subclass for non-camera images. 350 Butler dataset type to be mapped. 351 policy : `daf_persistence.Policy` `pexPolicy.Policy` 352 Mapping Policy. (pexPolicy only for backward compatibility) 353 registry : `lsst.obs.base.Registry` 354 Registry for metadata lookups 356 Path of root directory 359 def __init__(self, datasetType, policy, registry, root, **kwargs):
360 if isinstance(policy, pexPolicy.Policy):
361 policy = Policy(policy)
362 Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
363 self.
columns = policy.asArray(
'columns')
if 'columns' in policy
# ExposureMapping, garbled: the first line begins with the tail of
# ImageMapping's ternary (`else None`) before this class's docstring and
# __init__ signature; the last line fuses in standardize()'s return.
# Visible behavior mirrors ImageMapping (__init__ sets self.columns) plus a
# standardize() that delegates to mapper._standardizeExposure.
else None 367 """ExposureMapping is a Mapping subclass for normal exposures. 372 Butler dataset type to be mapped. 373 policy : `daf_persistence.Policy` or `pexPolicy.Policy` 374 Mapping Policy (pexPolicy only for backward compatibility) 375 registry : `lsst.obs.base.Registry` 376 Registry for metadata lookups 378 Path of root directory 381 def __init__(self, datasetType, policy, registry, root, **kwargs):
382 if isinstance(policy, pexPolicy.Policy):
383 policy = Policy(policy)
384 Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
385 self.
columns = policy.asArray(
'columns')
if 'columns' in policy
else None 388 return mapper._standardizeExposure(self, item, dataId)
# CalibrationMapping, garbled: class header missing, docstring fused with the
# __init__ signature, and interior lines absent (e.g. the bodies of the
# 'columns'/'filter'/'metadataKey' branches).  Kept byte-identical.
392 """CalibrationMapping is a Mapping subclass for calibration-type products. 394 The difference is that data properties in the query or template 395 can be looked up using a reference Mapping in addition to this one. 397 CalibrationMapping Policies can contain the following: 399 reference (string, optional) 400 a list of tables for finding missing dataset 401 identifier components (including the observation time, if a validity range 402 is required) in the exposure registry; note that the "tables" entry refers 403 to the calibration registry 405 refCols (string, optional) 406 a list of dataset properties required from the 407 reference tables for lookups in the calibration registry 410 true if the calibration dataset has a validity range 411 specified by a column in the tables of the reference dataset in the 412 exposure registry) and two columns in the tables of this calibration 413 dataset in the calibration registry) 415 obsTimeName (string, optional) 416 the name of the column in the reference 417 dataset tables containing the observation time (default "taiObs") 419 validStartName (string, optional) 420 the name of the column in the 421 calibration dataset tables containing the start of the validity range 422 (default "validStart") 424 validEndName (string, optional) 425 the name of the column in the 426 calibration dataset tables containing the end of the validity range 432 Butler dataset type to be mapped. 433 policy : `daf_persistence.Policy` or `pexPolicy.Policy` 434 Mapping Policy (pexPolicy only for backward compatibility) 435 registry : `lsst.obs.base.Registry` 436 Registry for metadata lookups 437 calibRegistry : `lsst.obs.base.Registry` 438 Registry for calibration metadata lookups. 440 Path of calibration root directory. 442 Path of data root directory; used for outputs only. 445 def __init__(self, datasetType, policy, registry, calibRegistry, calibRoot, dataRoot=None, **kwargs):
# Fragment of __init__: note it forwards calibRegistry/calibRoot (not
# registry/root) to Mapping.__init__, records optional 'reference' and
# 'refCols' arrays, and builds a validity-range triple when 'validRange'
# is set in the policy.
446 if isinstance(policy, pexPolicy.Policy):
447 policy = Policy(policy)
448 Mapping.__init__(self, datasetType, policy, calibRegistry, calibRoot, **kwargs)
449 self.
reference = policy.asArray(
"reference")
if "reference" in policy
else None 450 self.
refCols = policy.asArray(
"refCols")
if "refCols" in policy
else None 453 if "validRange" in policy
and policy[
"validRange"]:
454 self.
range = (
"?", policy[
"validStartName"], policy[
"validEndName"])
455 if "columns" in policy:
457 if "filter" in policy:
460 if "metadataKey" in policy:
# Fragment of map(): delegates to Mapping.map; the rest of the body is in
# missing lines.
463 def map(self, mapper, dataId, write=False):
464 location = Mapping.map(self, mapper, dataId, write=write)
# Fragment of lookup(): copies the data ID, resolves extra columns against
# the reference registry (query construction mostly missing), requires a
# unique match, then delegates to Mapping.lookup.  Final line is the tail
# of standardize(), which calls mapper._standardizeExposure with
# filter=self.setFilter.
471 """Look up properties for in a metadata registry given a partial 476 properties : `list` of `str` 477 Properties to look up. 484 Values of properties. 490 newId = dataId.copy()
494 for k, v
in dataId.items():
503 for k
in dataId.keys():
506 columns = set(properties)
510 return Mapping.lookup(self, properties, newId)
512 lookupDataId = dict(zip(where, values))
514 if len(lookups) != 1:
515 raise RuntimeError(
"No unique lookup for %s from %s: %d matches" %
516 (columns, dataId, len(lookups)))
517 if columns == set(properties):
520 for i, prop
in enumerate(columns):
521 newId[prop] = lookups[0][i]
522 return Mapping.lookup(self, properties, newId)
525 return mapper._standardizeExposure(self, item, dataId, filter=self.
setFilter)
# DatasetMapping, garbled: class header missing, docstring fused with the
# __init__ signature; at least one trailing assignment (original line 554,
# presumably the mandatory "storage" entry per the docstring) is missing.
# Visible behavior: converts a legacy policy and forwards to Mapping.__init__.
529 """DatasetMapping is a Mapping subclass for non-Exposure datasets that can 530 be retrieved by the standard daf_persistence mechanism. 532 The differences are that the Storage type must be specified and no 533 Exposure standardization is performed. 535 The "storage" entry in the Policy is mandatory; the "tables" entry is 536 optional; no "level" entry is allowed. 541 Butler dataset type to be mapped. 542 policy : `daf_persistence.Policy` `pexPolicy.Policy` 543 Mapping Policy. (pexPolicy only for backward compatibility) 544 registry : `lsst.obs.base.Registry` 545 Registry for metadata lookups 547 Path of root directory 550 def __init__(self, datasetType, policy, registry, root, **kwargs):
551 if isinstance(policy, pexPolicy.Policy):
552 policy = Policy(policy)
553 Mapping.__init__(self, datasetType, policy, registry, root, **kwargs)
# NOTE(review): the lines below are not real code — they look like an
# extractor-generated index of the method signatures defined above (no
# bodies, no trailing colons, `**kwargs` flattened to `kwargs`).  They are
# not valid Python and should be removed once the original file is
# recovered; kept byte-identical here.
def __init__(self, datasetType, policy, registry, root, kwargs)
def __init__(self, datasetType, policy, registry, calibRegistry, calibRoot, dataRoot=None, kwargs)
def __init__(self, datasetType, policy, registry, root, kwargs)
def standardize(self, mapper, item, dataId)
def map(self, mapper, dataId, write=False)
def have(self, properties, dataId)
def standardize(self, mapper, item, dataId)
def need(self, properties, dataId)
def lookup(self, properties, dataId)
def __init__(self, datasetType, policy, registry, root, kwargs)
def lookup(self, properties, dataId)
def __init__(self, datasetType, policy, registry, rootStorage, provided=None)
def map(self, mapper, dataId, write=False)