from . import Policy
from .deprecation import deprecate_class

"""This module defines the Mapper base class."""
32 """Mapper is a base class for all mappers.
34 Subclasses may define the following methods:
36 map_{datasetType}(self, dataId, write)
37 Map a dataset id for the given dataset type into a ButlerLocation.
38 If write=
True, this mapping
is for an output dataset.
40 query_{datasetType}(self, key, format, dataId)
41 Return the possible values
for the format fields that would produce
42 datasets at the granularity of key
in combination
with the provided
45 std_{datasetType}(self, item)
46 Standardize an object of the given data set type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self, **kwargs)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)

    backup(self, datasetType, dataId)
    """

    @staticmethod
    def Mapper(cfg):
        '''Instantiate a Mapper from a configuration.
        In some cases the cfg may have already been instantiated into a Mapper; this is allowed and
        the input variable is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be created by calling
                    Mapper.cfg().
        :return: a Mapper instance.
        '''
        if isinstance(cfg, Policy):
            return cfg['cls'](cfg)
        return cfg
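
    # Usage sketch (hypothetical names): the cfg is a Policy whose 'cls' entry
    # holds the concrete mapper class, so the factory can instantiate it; an
    # already-built Mapper simply passes through.
    #
    #     cfg = MyMapper.cfg(root="/path/to/repo")  # hypothetical cfg helper
    #     mapper = Mapper.Mapper(cfg)               # calls cfg['cls'](cfg)
    #     assert Mapper.Mapper(mapper) is mapper    # passthrough, not a Policy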
85 """Create a new Mapper, saving arguments for pickling.
87 This is in __new__ instead of __init__ to save the user
88 from having to save the arguments themselves (either explicitly,
89 or by calling the super
's __init__ with all their
90 *args,**kwargs. The resulting pickling system (of __new__,
91 __getstate__ and __setstate__
is similar to how __reduce__
92 is usually used,
except that we save the user
from any
93 responsibility (
except when overriding __new__, but that
104 return self._arguments
109 self.
__init____init__(*args, **kwargs)
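
    # Minimal round-trip sketch of the pickling scheme above (PickledMapper is
    # hypothetical): __new__ records the constructor arguments, pickle stores
    # them via __getstate__, and __setstate__ replays __init__ with them.
    #
    #     import pickle
    #
    #     class PickledMapper(Mapper):
    #         def __init__(self, root):
    #             self.root = root
    #
    #     m = pickle.loads(pickle.dumps(PickledMapper(root="/data/repo")))
    #     assert m.root == "/data/repo"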

    def keys(self):
        raise NotImplementedError("keys() unimplemented")
115 """Get possible values for keys given a partial data id.
117 :param datasetType: see documentation about the use of datasetType
118 :param key: this is used
as the
'level' parameter
120 :param dataId: see documentation about the use of dataId
123 func = getattr(self, 'query_' + datasetType)
125 val = func(format, self.
validatevalidate(dataId))
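
    # Dispatch sketch: for a subclass that defines, e.g. (names hypothetical),
    #
    #     def query_raw(self, format, dataId):
    #         return [(1, "g"), (2, "r")]
    #
    # mapper.queryMetadata("raw", ("visit", "filter"), {}) resolves to
    # query_raw(("visit", "filter"), {}) via the getattr lookup above.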
129 """Return a list of the mappable dataset types."""
132 for attr
in dir(self):
133 if attr.startswith(
"map_"):
134 list.append(attr[4:])
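
    # For example, a hypothetical subclass defining map_calexp and map_raw
    # reports getDatasetTypes() == ["calexp", "raw"] (dir() returns attribute
    # names in alphabetical order).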

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map
        dataId : DataId instance
            The dataId to use when mapping
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True)

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True, a
            list is returned. If write is False a single ButlerLocation is
            returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived
            mapper class may raise a NoResults exception. Butler catches this
            and will look in the next Repository if there is one.
        """
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)
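
    # Dispatch sketch (hypothetical dataset type): mapper.map("raw", dataId)
    # resolves to self.map_raw(self.validate(dataId), False), so subclasses
    # implement per-dataset-type map_ methods rather than overriding map().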
168 """Return true if this mapper can standardize an object of the given
171 return hasattr(self,
'std_' + datasetType)
174 """Standardize an object using the standardization method for its data
175 set type, if it exists.
"""
177 if hasattr(self,
'std_' + datasetType):
178 func = getattr(self,
'std_' + datasetType)
179 return func(item, self.
validatevalidate(dataId))
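
    # Sketch (hypothetical dataset type): a subclass defining
    # std_raw(self, item, dataId) makes canStandardize("raw") True, and
    # standardize("raw", item, dataId) then returns std_raw(item, dataId)
    # with the dataId validated first; without std_raw, item comes back
    # untouched.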
183 """Validate a dataId's contents.
185 If the dataId is valid,
return it. If an invalid component can be
186 transformed into a valid one, copy the dataId, fix the component,
and
187 return the copy. Otherwise,
raise an exception.
"""
192 """Rename any existing object with the given type and dataId.
194 Not implemented in the base mapper.
196 raise NotImplementedError(
"Base-class Mapper does not implement backups")
199 """Get the registry"""
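

# A minimal end-to-end sketch of the subclass contract described in the class
# docstring. All names below (DemoMapper, map_raw, query_raw, std_raw) and the
# string returned as a location are hypothetical; a real mapper's map_ methods
# would return a ButlerLocation.
#
#     class DemoMapper(Mapper):
#         def keys(self):
#             return ["visit", "filter"]
#
#         def map_raw(self, dataId, write):
#             return "raw-%(visit)d-%(filter)s" % dataId  # stand-in location
#
#         def query_raw(self, format, dataId):
#             return [(1, "g"), (2, "r")]
#
#         def std_raw(self, item, dataId):
#             return item
#
#     mapper = DemoMapper()
#     mapper.getDatasetTypes()                        # ["raw"]
#     mapper.map("raw", {"visit": 1, "filter": "g"})  # "raw-1-g"
#     mapper.canStandardize("raw")                    # True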