Coverage for python/lsst/daf/butler/datastore/generic_base.py: 32% (38 statements), coverage.py v7.4.0, created at 2024-01-25 10:50 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Generic datastore code useful for most datastores."""

from __future__ import annotations

__all__ = ("GenericBaseDatastore", "post_process_get")

import logging
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any, Generic, TypeVar

from .._exceptions import DatasetTypeNotSupportedError
from ..datastore._datastore import Datastore
from .stored_file_info import StoredDatastoreItemInfo

if TYPE_CHECKING:
    from .._dataset_ref import DatasetRef
    from .._storage_class import StorageClass

log = logging.getLogger(__name__)

_InfoType = TypeVar("_InfoType", bound=StoredDatastoreItemInfo)


class GenericBaseDatastore(Datastore, Generic[_InfoType]):
    """Methods useful for most implementations of a `Datastore`.

    Should always be sub-classed since key abstract methods are missing.
    """

    def _validate_put_parameters(self, inMemoryDataset: object, ref: DatasetRef) -> None:
        """Validate the supplied arguments for put.

        Parameters
        ----------
        inMemoryDataset : `object`
            The dataset to store.
        ref : `DatasetRef`
            Reference to the associated Dataset.
        """
        storageClass = ref.datasetType.storageClass

        # Sanity check
        if not isinstance(inMemoryDataset, storageClass.pytype):
            raise TypeError(
                f"Inconsistency between supplied object ({type(inMemoryDataset)}) "
                f"and storage class type ({storageClass.pytype})"
            )

        # Confirm that we can accept this dataset
        if not self.constraints.isAcceptable(ref):
            # Raise rather than use boolean return value.
            raise DatasetTypeNotSupportedError(
                f"Dataset {ref} has been rejected by this datastore via configuration."
            )

        return
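    # A minimal sketch (hypothetical ``datastore`` and ``table_ref``) of what
    # this validation enforces: an object whose type does not match the
    # storage class ``pytype`` is rejected before any I/O happens.
    #
    #     try:
    #         datastore._validate_put_parameters({"a": 1}, table_ref)
    #     except TypeError:
    #         ...  # object is not an instance of storageClass.pytype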

    def remove(self, ref: DatasetRef) -> None:
        """Indicate to the Datastore that a dataset can be removed.

        .. warning::

            This method deletes the artifact associated with this
            dataset and cannot be reversed.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required Dataset.

        Raises
        ------
        FileNotFoundError
            Attempt to remove a dataset that does not exist.

        Notes
        -----
        This method is used for immediate removal of a dataset and is
        generally reserved for internal testing of datastore APIs.
        It is implemented by calling `trash()` and then immediately calling
        `emptyTrash()`. This call is meant to be immediate so errors
        encountered during removal are not ignored.
        """
        self.trash(ref, ignore_errors=False)
        self.emptyTrash(ignore_errors=False)
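    # A minimal usage sketch (hypothetical ``datastore`` and ``ref``): because
    # the trash is emptied immediately with ``ignore_errors=False``, removing
    # the same dataset twice surfaces the failure rather than ignoring it.
    #
    #     datastore.remove(ref)
    #     datastore.remove(ref)  # raises FileNotFoundError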

    def transfer(self, inputDatastore: Datastore, ref: DatasetRef) -> None:
        """Retrieve a dataset from an input `Datastore`, and store
        the result in this `Datastore`.

        Parameters
        ----------
        inputDatastore : `Datastore`
            The external `Datastore` from which to retrieve the Dataset.
        ref : `DatasetRef`
            Reference to the required dataset in the input data store.
        """
        assert inputDatastore is not self  # unless we want it for renames?
        inMemoryDataset = inputDatastore.get(ref)
        return self.put(inMemoryDataset, ref)
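    # A minimal usage sketch (hypothetical ``source`` and ``destination``
    # datastores): ``transfer`` is a convenience get-then-put, so the dataset
    # must be acceptable to this datastore's constraints just as in ``put``.
    #
    #     destination.transfer(source, ref)
    #     assert destination.exists(ref)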


def post_process_get(
    inMemoryDataset: object,
    readStorageClass: StorageClass,
    assemblerParams: Mapping[str, Any] | None = None,
    isComponent: bool = False,
) -> object:
    """Given the Python object read from the datastore, manipulate
    it based on the supplied parameters and ensure the Python
    type is correct.

    Parameters
    ----------
    inMemoryDataset : `object`
        Dataset to check.
    readStorageClass : `StorageClass`
        The `StorageClass` used to obtain the assembler and to
        check the Python type.
    assemblerParams : `dict`, optional
        Parameters to pass to the assembler. Can be `None`.
    isComponent : `bool`, optional
        If this is a component, allow the inMemoryDataset to be `None`.

    Returns
    -------
    dataset : `object`
        In-memory dataset, potentially converted to expected type.
    """
    # Process any leftover parameters
    if assemblerParams:
        inMemoryDataset = readStorageClass.delegate().handleParameters(inMemoryDataset, assemblerParams)

    # Validate the returned data type matches the expected data type
    pytype = readStorageClass.pytype

    allowedTypes = []
    if pytype:
        allowedTypes.append(pytype)

    # Special case components to allow them to be None
    if isComponent:
        allowedTypes.append(type(None))

    if allowedTypes and not isinstance(inMemoryDataset, tuple(allowedTypes)):
        inMemoryDataset = readStorageClass.coerce_type(inMemoryDataset)

    return inMemoryDataset
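# A minimal usage sketch (hypothetical ``raw_object`` and ``readStorageClass``;
# the ``"columns"`` parameter name is an assumption): any assembler parameters
# are applied via the storage class delegate first, and the result is coerced
# only if it does not already match an allowed type.
#
#     processed = post_process_get(
#         raw_object,
#         readStorageClass,
#         assemblerParams={"columns": ["a", "b"]},
#         isComponent=False,
#     )
#     assert isinstance(processed, readStorageClass.pytype)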