def __init__ (self, root, **kwargs)
def map_foo (self, dataId, write)
def __new__ (cls, *args, **kwargs)
def __getstate__ (self)
def __setstate__ (self, state)
def keys (self)
def queryMetadata (self, datasetType, format, dataId)
def getDatasetTypes (self)
def map (self, datasetType, dataId, write=False)
def canStandardize (self, datasetType)
def standardize (self, datasetType, item, dataId)
def validate (self, dataId)
def backup (self, datasetType, dataId)
def getRegistry (self)
Definition at line 41 of file testMapper.py.
◆ __init__()
def lsst.daf.persistence.test.testMapper.MapperForTestWriting.__init__ (self, root, **kwargs)
◆ __getstate__()
def lsst.daf.persistence.mapper.Mapper.__getstate__ (self)
inherited
◆ __new__()
def lsst.daf.persistence.mapper.Mapper.__new__ (cls, *args, **kwargs)
inherited
Create a new Mapper, saving arguments for pickling.
This is done in __new__ instead of __init__ to save the user from having to
save the arguments themselves (either explicitly, or by calling the super's
__init__ with all their *args, **kwargs). The resulting pickling system (of
__new__, __getstate__, and __setstate__) is similar to how __reduce__ is
usually used, except that it relieves the user of any responsibility (except
when overriding __new__, which is not common).
Definition at line 82 of file mapper.py.
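A minimal sketch of what this buys a subclass author: because Mapper.__new__ records the constructor arguments, a subclass such as MapperForTestWriting can be pickled without defining any state-handling methods of its own. The helper below is hypothetical and assumes the lsst.daf.persistence.test.testMapper module is importable and that a suitable repository root string is available.

import pickle

from lsst.daf.persistence.test.testMapper import MapperForTestWriting


def roundtrip_mapper(root):
    # Construct a mapper; Mapper.__new__ stashes (root,) and any kwargs so that
    # __getstate__/__setstate__ can re-create the object without help from the
    # subclass.
    original = MapperForTestWriting(root)
    # Pickle and unpickle; no __reduce__ or custom state handling is needed in
    # the subclass.
    return pickle.loads(pickle.dumps(original))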
◆ __setstate__()
def lsst.daf.persistence.mapper.Mapper.__setstate__ (self, state)
inherited
◆ backup()
def lsst.daf.persistence.mapper.Mapper.backup (self, datasetType, dataId)
inherited
Rename any existing object with the given type and dataId.
Not implemented in the base mapper.
Definition at line 189 of file mapper.py.
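A minimal usage sketch, assuming `mapper` is a concrete mapper that actually implements backup() (the base class does not); the dataset type 'foo', the helper name, and the dataId are illustrative only.

def backup_before_overwrite(mapper, data_id):
    # Ask the mapper to rename any existing 'foo' dataset for this dataId so
    # that a subsequent write does not clobber it.
    mapper.backup('foo', data_id)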
◆ canStandardize()
def lsst.daf.persistence.mapper.Mapper.canStandardize (self, datasetType)
inherited
Return true if this mapper can standardize an object of the given
dataset type.
Definition at line 165 of file mapper.py.
◆ getDatasetTypes()
def lsst.daf.persistence.mapper.Mapper.getDatasetTypes (self)
inherited
Return a list of the mappable dataset types.
Definition at line 126 of file mapper.py.
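A minimal usage sketch, assuming `mapper` is a constructed concrete mapper; the helper name is hypothetical.

def mappable_types(mapper):
    # Returns the dataset types this mapper can map; for MapperForTestWriting,
    # which defines map_foo above, this would include 'foo'.
    return mapper.getDatasetTypes()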
◆ getRegistry()
def lsst.daf.persistence.mapper.Mapper.getRegistry (self)
inherited
◆ keys()
def lsst.daf.persistence.mapper.Mapper.keys (self)
inherited
◆ map()
def lsst.daf.persistence.mapper.Mapper.map (self, datasetType, dataId, write=False)
inherited
Map a data id using the mapping method for its dataset type.
Parameters
----------
datasetType : string
The datasetType to map
dataId : DataId instance
The dataId to use when mapping
write : bool, optional
Indicates if the map is being performed for a read operation
(False) or a write operation (True)
Returns
-------
ButlerLocation or a list of ButlerLocation
The location(s) found for the map operation. If write is True, a
list is returned. If write is False a single ButlerLocation is
returned.
Raises
------
NoResults
If no location was found for this map operation, the derived mapper
class may raise a lsst.daf.persistence.NoResults exception. Butler
catches this and will look in the next Repository if there is one.
Definition at line 135 of file mapper.py.
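A sketch of both directions of map(), based only on the signature and docstring above. It assumes `mapper` is a constructed concrete mapper (for MapperForTestWriting the dataset type would be 'foo'); the helper name and dataId contents are illustrative.

from lsst.daf.persistence import NoResults


def locate_foo(mapper, data_id):
    try:
        # write=False (the default): a single ButlerLocation is returned.
        read_loc = mapper.map('foo', data_id)
        # write=True: a list of ButlerLocations is returned.
        write_locs = mapper.map('foo', data_id, write=True)
        return read_loc, write_locs
    except NoResults:
        # A derived mapper may raise NoResults when nothing matches; Butler
        # would then try the next Repository.
        return None, []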
◆ map_foo()
def lsst.daf.persistence.test.testMapper.MapperForTestWriting.map_foo (self, dataId, write)
◆ queryMetadata()
def lsst.daf.persistence.mapper.Mapper.queryMetadata (self, datasetType, format, dataId)
inherited
Get possible values for keys given a partial data id.
:param datasetType: see documentation about the use of datasetType
:param format: the key or keys whose possible values are requested
    (used as the 'level' parameter)
:param dataId: see documentation about the use of dataId
:return: possible values for the requested keys
Definition at line 112 of file mapper.py.
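A usage sketch, assuming `mapper` is a concrete mapper with a registry behind it. The dataset type 'raw', the key name 'visit', and the partial dataId are illustrative, and passing a sequence of keys as `format` is an assumption about the concrete mapper rather than something stated above.

def visits_for_filter(mapper):
    # Ask which values of 'visit' are consistent with the partial dataId.
    return mapper.queryMetadata('raw', ['visit'], {'filter': 'g'})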
◆ standardize()
def lsst.daf.persistence.mapper.Mapper.standardize (self, datasetType, item, dataId)
inherited
Standardize an object using the standardization method for its dataset
type, if it exists.
Definition at line 171 of file mapper.py.
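A combined sketch of canStandardize() and standardize(), using only the signatures documented above; `mapper`, `item`, the dataset type 'foo', and the helper name are illustrative assumptions.

def maybe_standardize(mapper, item, data_id):
    if mapper.canStandardize('foo'):
        # Delegate to the standardization method for this dataset type, if the
        # concrete mapper defines one.
        return mapper.standardize('foo', item, data_id)
    return item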
◆ validate()
def lsst.daf.persistence.mapper.Mapper.validate (self, dataId)
inherited
Validate a dataId's contents.
If the dataId is valid, return it. If an invalid component can be
transformed into a valid one, copy the dataId, fix the component, and
return the copy. Otherwise, raise an exception.
Definition at line 180 of file mapper.py.
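A sketch of the contract described above: a valid dataId comes back unchanged, a fixable one comes back as a corrected copy, and an unfixable one raises. `mapper` and the helper name are assumed for illustration; the base class does not document a specific exception type.

def checked_data_id(mapper, data_id):
    # May return data_id itself or a corrected copy; raises if the contents
    # cannot be made valid.
    return mapper.validate(data_id)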
◆ root
lsst.daf.persistence.test.testMapper.MapperForTestWriting.root
◆ storage
lsst.daf.persistence.test.testMapper.MapperForTestWriting.storage
The documentation for this class was generated from the following file:
testMapper.py