lsst.daf.persistence 13.0-31-g48013df+4
mapper.py
#!/usr/bin/env python

#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program. If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
from builtins import object

import yaml

from . import Policy

"""This module defines the Mapper base class."""


class Mapper(object):
    """Mapper is a base class for all mappers.

    Subclasses may define the following methods:

    map_{datasetType}(self, dataId, write)
        Map a dataset id for the given dataset type into a ButlerLocation.
        If write=True, this mapping is for an output dataset.

    query_{datasetType}(self, format, dataId)
        Return the possible values for the format fields that would produce
        datasets compatible with the provided partial dataId.

    std_{datasetType}(self, item)
        Standardize an object of the given dataset type.

    Methods that must be overridden:

    keys(self)
        Return a list of the keys that can be used in data ids.

    Other public methods:

    __init__(self)

    getDatasetTypes(self)

    map(self, datasetType, dataId, write=False)

    queryMetadata(self, datasetType, format, dataId)

    canStandardize(self, datasetType)

    standardize(self, datasetType, item, dataId)

    validate(self, dataId)
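
    A minimal subclass sketch (illustrative only; the dataset type 'raw'
    and the data-id keys are assumptions, not part of this base class):

        class MinimalMapper(Mapper):
            def keys(self):
                return ['visit', 'ccd']

            def map_raw(self, dataId, write=False):
                # build and return a ButlerLocation for this dataId
                ...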
70  """
71 
    @staticmethod
    def Mapper(cfg):
        '''Instantiate a Mapper from a configuration.

        In some cases the cfg may already have been instantiated into a
        Mapper; this is allowed, and the input is simply returned.

        :param cfg: the cfg for this mapper. It is recommended this be
                    created by calling Mapper.cfg()
        :return: a Mapper instance
        '''
        if isinstance(cfg, Policy):
            return cfg['cls'](cfg)
        return cfg

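    # Usage sketch (MyMapper is a hypothetical subclass; cfg is a Policy,
    # built elsewhere, whose 'cls' entry holds that subclass, as read by
    # the isinstance branch above):
    #
    #     mapper = Mapper.Mapper(cfg)        # calls MyMapper(cfg)
    #     mapper is Mapper.Mapper(mapper)    # True: instances pass through
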
    def __new__(cls, *args, **kwargs):
        """Create a new Mapper, saving arguments for pickling.

        This is in __new__ instead of __init__ to save the user
        from having to save the arguments themselves (either explicitly,
        or by calling the super's __init__ with all their
        *args, **kwargs). The resulting pickling system (of __new__,
        __getstate__ and __setstate__) is similar to how __reduce__
        is usually used, except that we save the user from any
        responsibility (except when overriding __new__, but that
        is not common).
        """
        self = super(Mapper, cls).__new__(cls)
        self._arguments = (args, kwargs)
        return self

    def __init__(self, **kwargs):
        pass

    def __getstate__(self):
        return self._arguments

    def __setstate__(self, state):
        self._arguments = state
        args, kwargs = state
        self.__init__(*args, **kwargs)

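    # Pickling sketch: because __new__ records (args, kwargs) in
    # self._arguments, a subclass round-trips through pickle with no
    # __reduce__ of its own (MyMapper and its argument are hypothetical):
    #
    #     import pickle
    #     mapper = MyMapper(root='/data')             # captured by __new__
    #     clone = pickle.loads(pickle.dumps(mapper))  # __setstate__ re-runs
    #                                                 # __init__ with them
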
    def keys(self):
        raise NotImplementedError("keys() unimplemented")

    def queryMetadata(self, datasetType, format, dataId):
        """Get possible values for keys given a partial data id.

        :param datasetType: the dataset type to query.
        :param format: the key or keys whose possible values are wanted.
        :param dataId: a (possibly partial) data id constraining the values.
        :return: the possible values, as returned by the corresponding
                 query_{datasetType} method.
        """
        func = getattr(self, 'query_' + datasetType)
        return func(format, self.validate(dataId))

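    # Usage sketch (the 'raw' dataset type and the data-id keys are
    # hypothetical; the mapper must provide a query_raw method):
    #
    #     visits = mapper.queryMetadata('raw', ('visit',), {'ccd': 1})
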
    def getDatasetTypes(self):
        """Return a list of the mappable dataset types."""
        datasetTypes = []
        for attr in dir(self):
            if attr.startswith("map_"):
                datasetTypes.append(attr[4:])
        return datasetTypes

    def map(self, datasetType, dataId, write=False):
        """Map a data id using the mapping method for its dataset type.

        Parameters
        ----------
        datasetType : string
            The datasetType to map
        dataId : DataId instance
            The dataId to use when mapping
        write : bool, optional
            Indicates if the map is being performed for a read operation
            (False) or a write operation (True)

        Returns
        -------
        ButlerLocation or a list of ButlerLocation
            The location(s) found for the map operation. If write is True, a
            list is returned. If write is False a single ButlerLocation is
            returned.

        Raises
        ------
        NoResults
            If no location was found for this map operation, the derived
            mapper class may raise a lsst.daf.persistence.NoResults
            exception. Butler catches this and will look in the next
            Repository if there is one.
        """
        func = getattr(self, 'map_' + datasetType)
        return func(self.validate(dataId), write)

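    # Usage sketch (the 'raw' dataset type and the data-id keys are
    # hypothetical; the mapper must provide a map_raw method):
    #
    #     loc = mapper.map('raw', {'visit': 1234, 'ccd': 1})   # read
    #     locs = mapper.map('raw', {'visit': 1234, 'ccd': 1}, write=True)
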
    def canStandardize(self, datasetType):
        """Return true if this mapper can standardize an object of the given
        dataset type."""
        return hasattr(self, 'std_' + datasetType)

    def standardize(self, datasetType, item, dataId):
        """Standardize an object using the standardization method for its
        dataset type, if it exists."""
        func = getattr(self, 'std_' + datasetType, None)
        if func is not None:
            return func(item, self.validate(dataId))
        return item

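    # Usage sketch (the 'calexp' dataset type is hypothetical): callers can
    # probe before standardizing, though standardize() is already a no-op
    # when no std_ method exists.
    #
    #     if mapper.canStandardize('calexp'):
    #         item = mapper.standardize('calexp', item, dataId)
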
    def validate(self, dataId):
        """Validate a dataId's contents.

        If the dataId is valid, return it. If an invalid component can be
        transformed into a valid one, copy the dataId, fix the component, and
        return the copy. Otherwise, raise an exception."""
        return dataId

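    # Override sketch illustrating the contract above (the 'visit' key and
    # the coercion are hypothetical): fix what can be fixed on a copy,
    # raise on what cannot.
    #
    #     def validate(self, dataId):
    #         if 'visit' in dataId and not isinstance(dataId['visit'], int):
    #             dataId = dict(dataId)
    #             dataId['visit'] = int(dataId['visit'])
    #         return dataId
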
    def backup(self, datasetType, dataId):
        """Rename any existing object with the given type and dataId.

        Not implemented in the base mapper.
        """
        raise NotImplementedError("Base-class Mapper does not implement backups")

    def getRegistry(self):
        """Get the registry, if any. The base class has none, so return
        None."""
        return None