Coverage for python/lsst/daf/butler/core/storedFileInfo.py: 50%
78 statements
« prev ^ index » next coverage.py v6.5.0, created at 2023-03-04 02:04 -0800
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ("StoredDatastoreItemInfo", "StoredFileInfo")

import inspect
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Type

from lsst.resources import ResourcePath

from .formatter import Formatter, FormatterParameter
from .location import Location, LocationFactory
from .storageClass import StorageClass, StorageClassFactory

# DatasetId and DatasetRef are used only in type annotations below, so import
# them under TYPE_CHECKING (avoids a runtime import of .datasets).
if TYPE_CHECKING:
    from .datasets import DatasetId, DatasetRef

# String to use when a Python None is encountered; stored in the database in
# place of None (see StoredFileInfo.to_record/from_record).
NULLSTR = "__NULL_STRING__"
class StoredDatastoreItemInfo:
    """Internal information associated with a stored dataset in a `Datastore`.

    This is an empty base class; every concrete `Datastore` implementation
    is expected to define its own subclass that fills in the record layout.
    """

    __slots__ = ()

    @classmethod
    def from_record(cls: Type[StoredDatastoreItemInfo], record: Mapping[str, Any]) -> StoredDatastoreItemInfo:
        """Construct an instance from a database record.

        Parameters
        ----------
        record : `dict`
            The record associated with this item.

        Returns
        -------
        info : instance of the relevant type.
            The newly-constructed item corresponding to the record.
        """
        raise NotImplementedError()

    def to_record(self) -> Dict[str, Any]:
        """Convert record contents to a dictionary."""
        raise NotImplementedError()

    def file_location(self, factory: LocationFactory) -> Location:
        """Return the location of the artifact.

        Parameters
        ----------
        factory : `LocationFactory`
            Factory relevant to the datastore represented by this item.

        Returns
        -------
        location : `Location`
            The location of the item within this datastore.
        """
        raise NotImplementedError("The base class does not know how to locate an item in a datastore.")

    @property
    def dataset_id(self) -> DatasetId:
        """Dataset ID associated with this record (`DatasetId`)"""
        raise NotImplementedError()
@dataclass(frozen=True)
class StoredFileInfo(StoredDatastoreItemInfo):
    """Datastore-private metadata associated with a Datastore file."""

    # Declared as a tuple (the previous set literal gave a nondeterministic
    # slot ordering) so the layout is stable and conventional.
    __slots__ = ("formatter", "path", "storageClass", "component", "checksum", "file_size", "dataset_id")

    # Shared factory used by from_record() to resolve storage class names.
    storageClassFactory = StorageClassFactory()

    def __init__(
        self,
        formatter: FormatterParameter,
        path: str,
        storageClass: StorageClass,
        component: Optional[str],
        checksum: Optional[str],
        file_size: int,
        dataset_id: DatasetId,
    ):
        # The dataclass is frozen, so bypass the generated __setattr__ with
        # object.__setattr__ when populating the instance.
        object.__setattr__(self, "path", path)
        object.__setattr__(self, "storageClass", storageClass)
        object.__setattr__(self, "component", component)
        object.__setattr__(self, "checksum", checksum)
        object.__setattr__(self, "file_size", file_size)
        object.__setattr__(self, "dataset_id", dataset_id)

        # A formatter may be given as a string, a Formatter instance, or a
        # Formatter class; normalize all three to the fully-qualified name.
        if isinstance(formatter, str):
            # We trust that this string refers to a Formatter
            formatterStr = formatter
        elif isinstance(formatter, Formatter) or (
            inspect.isclass(formatter) and issubclass(formatter, Formatter)
        ):
            formatterStr = formatter.name()
        else:
            raise TypeError(f"Supplied formatter '{formatter}' is not a Formatter")
        object.__setattr__(self, "formatter", formatterStr)

    formatter: str
    """Fully-qualified name of Formatter. If a Formatter class or instance
    is given the name will be extracted."""

    path: str
    """Path to dataset within Datastore."""

    storageClass: StorageClass
    """StorageClass associated with Dataset."""

    component: Optional[str]
    """Component associated with this file. Can be None if the file does
    not refer to a component of a composite."""

    checksum: Optional[str]
    """Checksum of the serialized dataset."""

    file_size: int
    """Size of the serialized dataset in bytes."""

    dataset_id: DatasetId
    """DatasetId associated with this record."""

    def rebase(self, ref: DatasetRef) -> StoredFileInfo:
        """Return a copy of the record suitable for a specified reference.

        Parameters
        ----------
        ref : `DatasetRef`
            DatasetRef which provides component name and dataset ID for the
            new returned record.

        Returns
        -------
        record : `StoredFileInfo`
            New record instance.
        """
        # Take component and dataset_id from the ref; everything else is
        # copied from self. A ref without a component keeps our component.
        component = ref.datasetType.component()
        if component is None:
            component = self.component
        dataset_id = ref.getCheckedId()
        return StoredFileInfo(
            dataset_id=dataset_id,
            formatter=self.formatter,
            path=self.path,
            storageClass=self.storageClass,
            component=component,
            checksum=self.checksum,
            file_size=self.file_size,
        )

    def to_record(self) -> Dict[str, Any]:
        """Convert the record contents to a dictionary suitable for storing
        in a database.

        Returns
        -------
        record : `dict`
            Dictionary keyed by column name; `None` components are encoded
            as `NULLSTR` and the storage class by name.
        """
        component = self.component
        if component is None:
            # Use empty string since we want this to be part of the
            # primary key.
            component = NULLSTR
        return dict(
            dataset_id=self.dataset_id,
            formatter=self.formatter,
            path=self.path,
            storage_class=self.storageClass.name,
            component=component,
            checksum=self.checksum,
            file_size=self.file_size,
        )

    def file_location(self, factory: LocationFactory) -> Location:
        """Return the location of artifact.

        Parameters
        ----------
        factory : `LocationFactory`
            Factory relevant to the datastore represented by this item.

        Returns
        -------
        location : `Location`
            The location of the item within this datastore.
        """
        uriInStore = ResourcePath(self.path, forceAbsolute=False)
        if uriInStore.isabs():
            # An absolute URI stands on its own, outside the factory root.
            location = Location(None, uriInStore)
        else:
            location = factory.fromPath(uriInStore)
        return location

    @classmethod
    def from_record(cls: Type[StoredFileInfo], record: Mapping[str, Any]) -> StoredFileInfo:
        """Create instance from database record.

        Parameters
        ----------
        record : `dict`
            The record associated with this item.

        Returns
        -------
        info : `StoredFileInfo`
            The newly-constructed item corresponding to the record.
        """
        # Convert name of StorageClass to instance
        storageClass = cls.storageClassFactory.getStorageClass(record["storage_class"])
        # Empty strings and the NULLSTR sentinel both decode to None.
        component = record["component"] if (record["component"] and record["component"] != NULLSTR) else None

        # Use cls rather than hard-coding StoredFileInfo so subclasses
        # construct instances of themselves.
        info = cls(
            formatter=record["formatter"],
            path=record["path"],
            storageClass=storageClass,
            component=component,
            checksum=record["checksum"],
            file_size=record["file_size"],
            dataset_id=record["dataset_id"],
        )
        return info