Coverage for python/lsst/daf/butler/core/storedFileInfo.py: 50%

78 statements  

« prev     ^ index     » next       coverage.py v6.5.0, created at 2022-10-04 02:19 -0700

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22from __future__ import annotations 

23 

24__all__ = ("StoredDatastoreItemInfo", "StoredFileInfo") 

25 

26import inspect 

27from dataclasses import dataclass 

28from typing import TYPE_CHECKING, Any, Dict, Optional, Type 

29 

30from lsst.resources import ResourcePath 

31 

32from .formatter import Formatter, FormatterParameter 

33from .location import Location, LocationFactory 

34from .storageClass import StorageClass, StorageClassFactory 

35 

36if TYPE_CHECKING: 

37 from .datasets import DatasetId, DatasetRef 

38 

39# String to use when a Python None is encountered 

40NULLSTR = "__NULL_STRING__" 

41 

42 

class StoredDatastoreItemInfo:
    """Internal information associated with a stored dataset in a `Datastore`.

    This base class is deliberately empty: it only fixes the interface.
    Each concrete `Datastore` implementation provides its own subclass
    holding whatever private metadata that datastore needs.
    """

    # No instance state at this level; subclasses declare their own slots.
    __slots__ = ()

    def file_location(self, factory: LocationFactory) -> Location:
        """Compute where the associated artifact lives.

        Parameters
        ----------
        factory : `LocationFactory`
            Factory appropriate to the datastore holding this item.

        Returns
        -------
        location : `Location`
            Location of the item inside that datastore.
        """
        raise NotImplementedError("The base class does not know how to locate an item in a datastore.")

    def to_record(self) -> Dict[str, Any]:
        """Convert record contents to a dictionary."""
        raise NotImplementedError()

    @classmethod
    def from_record(cls: Type[StoredDatastoreItemInfo], record: Dict[str, Any]) -> StoredDatastoreItemInfo:
        """Construct an item from a database record.

        Parameters
        ----------
        record : `dict`
            The record retrieved from the database for this item.

        Returns
        -------
        info : instance of the relevant type.
            Item reconstructed from the record contents.
        """
        raise NotImplementedError()

    @property
    def dataset_id(self) -> DatasetId:
        """Dataset ID associated with this record (`DatasetId`)"""
        raise NotImplementedError()

91 

92 

@dataclass(frozen=True)
class StoredFileInfo(StoredDatastoreItemInfo):
    """Datastore-private metadata associated with a Datastore file.

    Records everything needed to locate and read back a dataset stored as
    a file: the formatter that wrote it, its path within the datastore,
    its storage class, and bookkeeping fields (component, checksum, size,
    dataset ID).
    """

    # Explicit slots (matching the annotated fields below) avoid a
    # per-instance __dict__; there can be very many of these records.
    __slots__ = {"formatter", "path", "storageClass", "component", "checksum", "file_size", "dataset_id"}

    # Shared factory used by from_record() to map a storage class name
    # back to a StorageClass instance.
    storageClassFactory = StorageClassFactory()

    def __init__(
        self,
        formatter: FormatterParameter,
        path: str,
        storageClass: StorageClass,
        component: Optional[str],
        checksum: Optional[str],
        file_size: int,
        dataset_id: DatasetId,
    ):

        # Use these shenanigans to allow us to use a frozen dataclass:
        # frozen=True blocks ordinary attribute assignment, so bypass it
        # with object.__setattr__.
        object.__setattr__(self, "path", path)
        object.__setattr__(self, "storageClass", storageClass)
        object.__setattr__(self, "component", component)
        object.__setattr__(self, "checksum", checksum)
        object.__setattr__(self, "file_size", file_size)
        object.__setattr__(self, "dataset_id", dataset_id)

        # Normalize the formatter argument to a fully-qualified name:
        # a string is taken at face value; a Formatter instance or
        # subclass is asked for its name.
        if isinstance(formatter, str):
            # We trust that this string refers to a Formatter
            formatterStr = formatter
        elif isinstance(formatter, Formatter) or (
            inspect.isclass(formatter) and issubclass(formatter, Formatter)
        ):
            formatterStr = formatter.name()
        else:
            raise TypeError(f"Supplied formatter '{formatter}' is not a Formatter")
        object.__setattr__(self, "formatter", formatterStr)

    formatter: str
    """Fully-qualified name of Formatter. If a Formatter class or instance
    is given the name will be extracted."""

    path: str
    """Path to dataset within Datastore."""

    storageClass: StorageClass
    """StorageClass associated with Dataset."""

    component: Optional[str]
    """Component associated with this file. Can be None if the file does
    not refer to a component of a composite."""

    checksum: Optional[str]
    """Checksum of the serialized dataset."""

    file_size: int
    """Size of the serialized dataset in bytes."""

    dataset_id: DatasetId
    """DatasetId associated with this record."""

    def rebase(self, ref: DatasetRef) -> StoredFileInfo:
        """Return a copy of the record suitable for a specified reference.

        Parameters
        ----------
        ref : `DatasetRef`
            DatasetRef which provides component name and dataset ID for the
            new returned record.

        Returns
        -------
        record : `StoredFileInfo`
            New record instance.
        """
        # take component and dataset_id from the ref, rest comes from self
        component = ref.datasetType.component()
        if component is None:
            # Ref is not a component ref; keep whatever this record had.
            component = self.component
        dataset_id = ref.getCheckedId()
        return StoredFileInfo(
            dataset_id=dataset_id,
            formatter=self.formatter,
            path=self.path,
            storageClass=self.storageClass,
            component=component,
            checksum=self.checksum,
            file_size=self.file_size,
        )

    def to_record(self) -> Dict[str, Any]:
        """Convert the supplied ref to a database record."""
        component = self.component
        if component is None:
            # Use empty string since we want this to be part of the
            # primary key.
            component = NULLSTR
        return dict(
            dataset_id=self.dataset_id,
            formatter=self.formatter,
            path=self.path,
            storage_class=self.storageClass.name,
            component=component,
            checksum=self.checksum,
            file_size=self.file_size,
        )

    def file_location(self, factory: LocationFactory) -> Location:
        """Return the location of artifact.

        Parameters
        ----------
        factory : `LocationFactory`
            Factory relevant to the datastore represented by this item.

        Returns
        -------
        location : `Location`
            The location of the item within this datastore.
        """
        # An absolute path stands on its own; a relative one is resolved
        # against the datastore root via the factory.
        uriInStore = ResourcePath(self.path, forceAbsolute=False)
        if uriInStore.isabs():
            location = Location(None, uriInStore)
        else:
            location = factory.fromPath(uriInStore)
        return location

    @classmethod
    def from_record(cls: Type[StoredFileInfo], record: Dict[str, Any]) -> StoredFileInfo:
        """Create instance from database record.

        Parameters
        ----------
        record : `dict`
            The record associated with this item.

        Returns
        -------
        info : `StoredFileInfo`
            The newly-constructed item corresponding to the record.
        """
        # Convert name of StorageClass to instance
        storageClass = cls.storageClassFactory.getStorageClass(record["storage_class"])
        # Map the NULLSTR sentinel (and empty string) back to Python None
        # — the inverse of the substitution made in to_record().
        component = record["component"] if (record["component"] and record["component"] != NULLSTR) else None

        info = StoredFileInfo(
            formatter=record["formatter"],
            path=record["path"],
            storageClass=storageClass,
            component=component,
            checksum=record["checksum"],
            file_size=record["file_size"],
            dataset_id=record["dataset_id"],
        )
        return info