Coverage for python / lsst / daf / butler / datastore / generic_base.py: 35%
29 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-05-01 08:18 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28"""Generic datastore code useful for most datastores."""
30from __future__ import annotations
32__all__ = ("GenericBaseDatastore", "post_process_get")
34import logging
35from collections.abc import Mapping
36from typing import TYPE_CHECKING, Any, Generic, TypeVar
38from ..datastore._datastore import Datastore
39from .stored_file_info import StoredDatastoreItemInfo
41if TYPE_CHECKING:
42 from .._dataset_ref import DatasetRef
43 from .._storage_class import StorageClass
45log = logging.getLogger(__name__)
47_InfoType = TypeVar("_InfoType", bound=StoredDatastoreItemInfo)
class GenericBaseDatastore(Datastore, Generic[_InfoType]):
    """Methods useful for most implementations of a `Datastore`.

    Should always be sub-classed since key abstract methods are missing.
    """

    def remove(self, ref: DatasetRef) -> None:
        """Indicate to the Datastore that a dataset can be removed.

        .. warning::

            This method deletes the artifact associated with this
            dataset and can not be reversed.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required Dataset.

        Raises
        ------
        FileNotFoundError
            Attempt to remove a dataset that does not exist.

        Notes
        -----
        This method is used for immediate removal of a dataset and is
        generally reserved for internal testing of datastore APIs.
        It is implemented by calling `trash()` and then immediately calling
        `emptyTrash()`. This call is meant to be immediate so errors
        encountered during removal are not ignored.
        """
        # Immediate, non-best-effort removal: propagate errors from both
        # the trash step and the empty-trash step instead of ignoring them.
        self.trash(ref, ignore_errors=False)
        self.emptyTrash(ignore_errors=False, refs=[ref])

    def transfer(self, inputDatastore: Datastore, ref: DatasetRef) -> None:
        """Retrieve a dataset from an input `Datastore`,
        and store the result in this `Datastore`.

        Parameters
        ----------
        inputDatastore : `Datastore`
            The external `Datastore` from which to retrieve the Dataset.
        ref : `DatasetRef`
            Reference to the required dataset in the input data store.
        """
        # Sanity check: transferring from ourselves would re-put an
        # artifact on top of itself.
        assert inputDatastore is not self  # unless we want it for renames?
        inMemoryDataset = inputDatastore.get(ref)
        # Fix: previously this ``return``-ed the result of ``put`` despite
        # the ``-> None`` annotation; ``put`` is called purely for its
        # side effect of storing the dataset.
        self.put(inMemoryDataset, ref)
def post_process_get(
    inMemoryDataset: object,
    readStorageClass: StorageClass,
    assemblerParams: Mapping[str, Any] | None = None,
    isComponent: bool = False,
) -> object:
    """Given the Python object read from the datastore, manipulate
    it based on the supplied parameters and ensure the Python
    type is correct.

    Parameters
    ----------
    inMemoryDataset : `object`
        Dataset to check.
    readStorageClass : `StorageClass`
        The `StorageClass` used to obtain the assembler and to
        check the python type.
    assemblerParams : `dict`, optional
        Parameters to pass to the assembler. Can be `None`.
    isComponent : `bool`, optional
        If this is a component, allow the inMemoryDataset to be `None`.

    Returns
    -------
    dataset : `object`
        In-memory dataset, potentially converted to expected type.
    """
    # Apply any parameters that were not handled earlier in the read.
    if assemblerParams:
        delegate = readStorageClass.delegate()
        inMemoryDataset = delegate.handleParameters(inMemoryDataset, assemblerParams)

    # Collect the python types the result is permitted to have.
    expected_type = readStorageClass.pytype
    acceptable: list[type] = []
    if expected_type:
        acceptable.append(expected_type)
    if isComponent:
        # A component is allowed to be missing, represented by None.
        acceptable.append(type(None))

    # Coerce only when a type constraint exists and is not already met.
    if acceptable and not isinstance(inMemoryDataset, tuple(acceptable)):
        inMemoryDataset = readStorageClass.coerce_type(inMemoryDataset)

    return inMemoryDataset