Coverage for python/lsst/daf/butler/core/storedFileInfo.py: 50%
78 statements
« prev ^ index » next coverage.py v6.5.0, created at 2022-11-06 12:40 -0800
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22from __future__ import annotations
24__all__ = ("StoredDatastoreItemInfo", "StoredFileInfo")
26import inspect
27from dataclasses import dataclass
28from typing import TYPE_CHECKING, Any, Dict, Optional, Type
30from lsst.resources import ResourcePath
32from .formatter import Formatter, FormatterParameter
33from .location import Location, LocationFactory
34from .storageClass import StorageClass, StorageClassFactory
36if TYPE_CHECKING: 36 ↛ 37line 36 didn't jump to line 37, because the condition on line 36 was never true
37 from .datasets import DatasetId, DatasetRef
39# String to use when a Python None is encountered
40NULLSTR = "__NULL_STRING__"
class StoredDatastoreItemInfo:
    """Internal information that a `Datastore` keeps about a stored dataset.

    An empty base class: every concrete datastore implementation is
    expected to define its own subclass with real storage.
    """

    __slots__ = ()

    def file_location(self, factory: LocationFactory) -> Location:
        """Return the location of the stored artifact.

        Parameters
        ----------
        factory : `LocationFactory`
            Factory appropriate to the datastore this item belongs to.

        Returns
        -------
        location : `Location`
            Location of the item within that datastore.

        Raises
        ------
        NotImplementedError
            Always raised by this base class; subclasses must override.
        """
        raise NotImplementedError("The base class does not know how to locate an item in a datastore.")

    @classmethod
    def from_record(cls: Type[StoredDatastoreItemInfo], record: Dict[str, Any]) -> StoredDatastoreItemInfo:
        """Construct an instance from a database record.

        Parameters
        ----------
        record : `dict`
            Record associated with this item.

        Returns
        -------
        info : instance of the relevant type.
            Newly-constructed item corresponding to the record.

        Raises
        ------
        NotImplementedError
            Always raised by this base class; subclasses must override.
        """
        raise NotImplementedError

    def to_record(self) -> Dict[str, Any]:
        """Return this record's contents as a dictionary.

        Raises
        ------
        NotImplementedError
            Always raised by this base class; subclasses must override.
        """
        raise NotImplementedError

    @property
    def dataset_id(self) -> DatasetId:
        """Dataset ID associated with this record (`DatasetId`).

        Subclasses must override; the base class cannot know the ID.
        """
        raise NotImplementedError
@dataclass(frozen=True)
class StoredFileInfo(StoredDatastoreItemInfo):
    """Datastore-private metadata associated with a Datastore file."""

    # NOTE(review): this is a set literal, not a tuple. ``__slots__`` accepts
    # any iterable of strings so this is legal, but slot ordering is
    # unspecified; a tuple would be the conventional spelling.
    __slots__ = {"formatter", "path", "storageClass", "component", "checksum", "file_size", "dataset_id"}

    # Class-level factory shared by all instances; used in from_record() to
    # turn a stored storage-class name back into a StorageClass instance.
    storageClassFactory = StorageClassFactory()

    def __init__(
        self,
        formatter: FormatterParameter,
        path: str,
        storageClass: StorageClass,
        component: Optional[str],
        checksum: Optional[str],
        file_size: int,
        dataset_id: DatasetId,
    ):
        """Construct the record, normalizing ``formatter`` to a string name.

        Parameters
        ----------
        formatter : `FormatterParameter`
            Formatter name, class, or instance; stored as its
            fully-qualified name.
        path : `str`
            Path to the dataset within the datastore.
        storageClass : `StorageClass`
            Storage class associated with the dataset.
        component : `str` or `None`
            Component name, or `None` if not a composite component.
        checksum : `str` or `None`
            Checksum of the serialized dataset.
        file_size : `int`
            Size of the serialized dataset in bytes.
        dataset_id : `DatasetId`
            ID of the dataset this record describes.
        """

        # Use these shenanigans to allow us to use a frozen dataclass:
        # direct assignment on a frozen instance would raise
        # FrozenInstanceError, so go through object.__setattr__.
        object.__setattr__(self, "path", path)
        object.__setattr__(self, "storageClass", storageClass)
        object.__setattr__(self, "component", component)
        object.__setattr__(self, "checksum", checksum)
        object.__setattr__(self, "file_size", file_size)
        object.__setattr__(self, "dataset_id", dataset_id)

        # Normalize the formatter to a string so the record is directly
        # serializable to a database row.
        if isinstance(formatter, str):
            # We trust that this string refers to a Formatter
            formatterStr = formatter
        elif isinstance(formatter, Formatter) or (
            inspect.isclass(formatter) and issubclass(formatter, Formatter)
        ):
            formatterStr = formatter.name()
        else:
            raise TypeError(f"Supplied formatter '{formatter}' is not a Formatter")
        object.__setattr__(self, "formatter", formatterStr)

    # Field declarations follow the hand-written __init__ above; @dataclass
    # still records them as fields but will not replace the class-defined
    # __init__.
    formatter: str
    """Fully-qualified name of Formatter. If a Formatter class or instance
    is given the name will be extracted."""

    path: str
    """Path to dataset within Datastore."""

    storageClass: StorageClass
    """StorageClass associated with Dataset."""

    component: Optional[str]
    """Component associated with this file. Can be None if the file does
    not refer to a component of a composite."""

    checksum: Optional[str]
    """Checksum of the serialized dataset."""

    file_size: int
    """Size of the serialized dataset in bytes."""

    dataset_id: DatasetId
    """DatasetId associated with this record."""

    def rebase(self, ref: DatasetRef) -> StoredFileInfo:
        """Return a copy of the record suitable for a specified reference.

        Parameters
        ----------
        ref : `DatasetRef`
            DatasetRef which provides component name and dataset ID for the
            new returned record.

        Returns
        -------
        record : `StoredFileInfo`
            New record instance.
        """
        # take component and dataset_id from the ref, rest comes from self
        component = ref.datasetType.component()
        if component is None:
            # Ref has no component of its own; keep this record's component.
            component = self.component
        dataset_id = ref.getCheckedId()
        return StoredFileInfo(
            dataset_id=dataset_id,
            formatter=self.formatter,
            path=self.path,
            storageClass=self.storageClass,
            component=component,
            checksum=self.checksum,
            file_size=self.file_size,
        )

    def to_record(self) -> Dict[str, Any]:
        """Convert this record's contents to a database record dictionary."""
        component = self.component
        if component is None:
            # Use empty string since we want this to be part of the
            # primary key.
            component = NULLSTR
        return dict(
            dataset_id=self.dataset_id,
            formatter=self.formatter,
            path=self.path,
            # Store the storage class by name; from_record() resolves it
            # back through storageClassFactory.
            storage_class=self.storageClass.name,
            component=component,
            checksum=self.checksum,
            file_size=self.file_size,
        )

    def file_location(self, factory: LocationFactory) -> Location:
        """Return the location of artifact.

        Parameters
        ----------
        factory : `LocationFactory`
            Factory relevant to the datastore represented by this item.

        Returns
        -------
        location : `Location`
            The location of the item within this datastore.
        """
        uriInStore = ResourcePath(self.path, forceAbsolute=False)
        if uriInStore.isabs():
            # Absolute URI: the artifact lives outside the datastore root,
            # so bypass the factory.
            location = Location(None, uriInStore)
        else:
            location = factory.fromPath(uriInStore)
        return location

    @classmethod
    def from_record(cls: Type[StoredFileInfo], record: Dict[str, Any]) -> StoredFileInfo:
        """Create instance from database record.

        Parameters
        ----------
        record : `dict`
            The record associated with this item.

        Returns
        -------
        info : `StoredFileInfo`
            The newly-constructed item corresponding to the record.
        """
        # Convert name of StorageClass to instance
        storageClass = cls.storageClassFactory.getStorageClass(record["storage_class"])
        # NULLSTR (and empty string) round-trip back to a Python None;
        # see to_record(), which stores NULLSTR so component can be part
        # of the primary key.
        component = record["component"] if (record["component"] and record["component"] != NULLSTR) else None

        info = StoredFileInfo(
            formatter=record["formatter"],
            path=record["path"],
            storageClass=storageClass,
            component=component,
            checksum=record["checksum"],
            file_size=record["file_size"],
            dataset_id=record["dataset_id"],
        )
        return info