# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""In-memory datastore."""

from __future__ import annotations

__all__ = ("StoredMemoryItemInfo", "InMemoryDatastore")

import logging
import time
from dataclasses import dataclass
from urllib.parse import urlencode
from typing import (
    TYPE_CHECKING,
    Any,
    Dict,
    Iterable,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Union,
)

from lsst.daf.butler import StoredDatastoreItemInfo, StorageClass, ButlerURI
from lsst.daf.butler.registry.interfaces import DatastoreRegistryBridge
from .genericDatastore import GenericBaseDatastore

if TYPE_CHECKING:
    from lsst.daf.butler import (Config, DatasetRef, DatasetType,
                                 LookupKey)
    from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager

log = logging.getLogger(__name__)


@dataclass(frozen=True)
class StoredMemoryItemInfo(StoredDatastoreItemInfo):
    """Internal metadata associated with a DatasetRef stored in an
    `InMemoryDatastore`.
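
    Examples
    --------
    A minimal sketch of constructing a record directly; the storage class
    name and the parent ID below are illustrative values, not taken from a
    real repository:

    >>> import time
    >>> from lsst.daf.butler import StorageClass
    >>> info = StoredMemoryItemInfo(time.time(),
    ...                             StorageClass("StructuredDataDict"),
    ...                             parentID=1)
    >>> info.parentID
    1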
    """

    __slots__ = {"timestamp", "storageClass", "parentID"}

    timestamp: float
    """Unix timestamp indicating the time the dataset was stored."""

    storageClass: StorageClass
    """StorageClass associated with the dataset."""

    parentID: int
    """ID of the parent `DatasetRef` if this entry is a concrete
    composite. Not used if the dataset being stored is not a
    virtual component of a composite.
    """


class InMemoryDatastore(GenericBaseDatastore):
    """Basic Datastore for writing to an in-memory cache.

    This datastore is ephemeral in that the contents of the datastore
    disappear when the Python process completes. This also means that
    other processes cannot access this datastore.

    Parameters
    ----------
    config : `DatastoreConfig` or `str`
        Configuration.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Object that manages the interface between `Registry` and datastores.
    butlerRoot : `str`, optional
        Unused parameter.

    Notes
    -----
    InMemoryDatastore does not support any file-based ingest.
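
    Examples
    --------
    Datastores are normally created by a `Butler` from configuration rather
    than constructed directly; ``defaultConfigFile`` below points at the
    default configuration for this class. A hypothetical direct
    construction, assuming an existing ``bridgeManager``, could look like:

    >>> datastore = InMemoryDatastore("datastores/inMemoryDatastore.yaml",
    ...                               bridgeManager)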
    """

    defaultConfigFile = "datastores/inMemoryDatastore.yaml"
    """Path to configuration defaults. Relative to $DAF_BUTLER_DIR/config or
    absolute path. Can be None if no defaults specified.
    """

    isEphemeral = True
    """A new datastore is created every time and datasets disappear when
    the process shuts down."""

    datasets: Dict[int, Any]
    """Internal storage of datasets indexed by dataset ID."""

    records: Dict[int, StoredMemoryItemInfo]
    """Internal records about stored datasets."""

    def __init__(self, config: Union[Config, str],
                 bridgeManager: DatastoreRegistryBridgeManager,
                 butlerRoot: Optional[str] = None):
        super().__init__(config, bridgeManager)

        # Name ourselves with the timestamp the datastore
        # was created.
        self.name = "{}@{}".format(type(self).__name__, time.time())
        log.debug("Creating datastore %s", self.name)

        # Storage of datasets, keyed by dataset_id
        self.datasets: Dict[int, Any] = {}

        # Records is distinct in order to track concrete composite components
        # where we register multiple components for a single dataset.
        self.records: Dict[int, StoredMemoryItemInfo] = {}

        # Related records that share the same parent
        self.related: Dict[int, Set[int]] = {}

        self._bridge = bridgeManager.register(self.name, ephemeral=True)

    @classmethod
    def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
        """Set any filesystem-dependent config options for this Datastore to
        be appropriate for a new empty repository with the given root.

        Does nothing in this implementation.

        Parameters
        ----------
        root : `str`
            Filesystem path to the root of the data repository.
        config : `Config`
            A `Config` to update. Only the subset understood by
            this component will be updated. Will not expand
            defaults.
        full : `Config`
            A complete config with all defaults expanded that can be
            converted to a `DatastoreConfig`. Read-only and will not be
            modified by this method.
            Repository-specific options that should not be obtained
            from defaults when Butler instances are constructed
            should be copied from ``full`` to ``config``.
        overwrite : `bool`, optional
            If `False`, do not modify a value in ``config`` if the value
            already exists. Default is always to overwrite with the provided
            ``root``.

        Notes
        -----
        If a keyword is explicitly defined in the supplied ``config`` it
        will not be overridden by this method if ``overwrite`` is `False`.
        This allows explicit values set in external configs to be retained.
        """
        return

    @property
    def bridge(self) -> DatastoreRegistryBridge:
        # Docstring inherited from GenericBaseDatastore.
        return self._bridge

    def addStoredItemInfo(self, refs: Iterable[DatasetRef],
                          infos: Iterable[StoredMemoryItemInfo]) -> None:
        # Docstring inherited from GenericBaseDatastore.
        for ref, info in zip(refs, infos):
            if ref.id is None:
                raise RuntimeError(f"Can not store unresolved DatasetRef {ref}")
            self.records[ref.id] = info
            self.related.setdefault(info.parentID, set()).add(ref.id)

    def getStoredItemInfo(self, ref: DatasetIdRef) -> StoredMemoryItemInfo:
        # Docstring inherited from GenericBaseDatastore.
        if ref.id is None:
            raise RuntimeError(f"Can not retrieve unresolved DatasetRef {ref}")
        return self.records[ref.id]

    def getStoredItemsInfo(self, ref: DatasetIdRef) -> List[StoredMemoryItemInfo]:
        # Docstring inherited from GenericBaseDatastore.
        return [self.getStoredItemInfo(ref)]

    def removeStoredItemInfo(self, ref: DatasetIdRef) -> None:
        # Docstring inherited from GenericBaseDatastore.
        # If a component has been removed previously then we can sometimes
        # be asked to remove it again. Other datastores ignore this,
        # so we also ignore it here.
        if ref.id is None:
            raise RuntimeError(f"Can not remove unresolved DatasetRef {ref}")
        if ref.id not in self.records:
            return
        record = self.records[ref.id]
        del self.records[ref.id]
        self.related[record.parentID].remove(ref.id)

    def _get_dataset_info(self, ref: DatasetIdRef) -> Tuple[int, StoredMemoryItemInfo]:
        """Check that the dataset is present and return the real ID and
        associated information.

        Parameters
        ----------
        ref : `DatasetRef`
            Target `DatasetRef`.

        Returns
        -------
        realID : `int`
            The dataset ID associated with this ref that should be used. This
            could either be the ID of the supplied `DatasetRef` or the parent.
        storageInfo : `StoredMemoryItemInfo`
            Associated storage information.

        Raises
        ------
        FileNotFoundError
            Raised if the dataset is not present in this datastore.
        """
        try:
            storedItemInfo = self.getStoredItemInfo(ref)
        except KeyError:
            raise FileNotFoundError(f"No such dataset in memory: {ref}") from None
        realID = ref.id
        if storedItemInfo.parentID is not None:
            realID = storedItemInfo.parentID

        if realID not in self.datasets:
            raise FileNotFoundError(f"No such dataset in memory: {ref}")

        return realID, storedItemInfo

    def exists(self, ref: DatasetRef) -> bool:
        """Check if the dataset exists in the datastore.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required dataset.

        Returns
        -------
        exists : `bool`
            `True` if the entity exists in the `Datastore`.
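
        Examples
        --------
        A sketch assuming a resolved ``ref`` whose dataset was previously
        stored with `put` (``ref`` and the stored object are hypothetical):

        >>> datastore.put({"a": 1}, ref)
        >>> datastore.exists(ref)
        True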
        """
        try:
            self._get_dataset_info(ref)
        except FileNotFoundError:
            return False
        return True

    def get(self, ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None) -> Any:
        """Load an InMemoryDataset from the store.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required Dataset.
        parameters : `dict`
            `StorageClass`-specific parameters that specify, for example,
            a slice of the dataset to be loaded.

        Returns
        -------
        inMemoryDataset : `object`
            Requested dataset or slice thereof as an InMemoryDataset.

        Raises
        ------
        FileNotFoundError
            Requested dataset can not be retrieved.
        TypeError
            Return value from formatter has unexpected type.
        ValueError
            Formatter failed to process the dataset.
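
        Examples
        --------
        A sketch of a plain read followed by a parameterized read; ``ref``
        and the ``"slice"`` parameter name are hypothetical here, since
        valid parameters depend on the dataset's `StorageClass`:

        >>> inMemoryDataset = datastore.get(ref)
        >>> subset = datastore.get(ref, parameters={"slice": slice(0, 10)})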
        """

        log.debug("Retrieve %s from %s with parameters %s", ref, self.name, parameters)

        realID, storedItemInfo = self._get_dataset_info(ref)

        # We have a write storage class and a read storage class and they
        # can be different for concrete composites.
        readStorageClass = ref.datasetType.storageClass
        writeStorageClass = storedItemInfo.storageClass

        # Check that the supplied parameters are suitable for the type read
        readStorageClass.validateParameters(parameters)

        inMemoryDataset = self.datasets[realID]

        component = ref.datasetType.component()

        # Different storage classes imply a component request
        if readStorageClass != writeStorageClass:

            if component is None:
                raise ValueError("Storage class inconsistency ({} vs {}) but no"
                                 " component requested".format(readStorageClass.name,
                                                               writeStorageClass.name))

            # Concrete composite written as a single object (we hope)
            inMemoryDataset = writeStorageClass.assembler().getComponent(inMemoryDataset, component)

        # Since there is no formatter to process parameters, they all must be
        # passed to the assembler.
        return self._post_process_get(inMemoryDataset, readStorageClass, parameters,
                                      isComponent=component is not None)

    def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None:
        """Write an InMemoryDataset with a given `DatasetRef` to the store.

        Parameters
        ----------
        inMemoryDataset : `object`
            The dataset to store.
        ref : `DatasetRef`
            Reference to the associated Dataset.

        Raises
        ------
        TypeError
            Supplied object and storage class are inconsistent.
        DatasetTypeNotSupportedError
            The associated `DatasetType` is not handled by this datastore.

        Notes
        -----
        If the datastore is configured to reject certain dataset types it
        is possible that the put will fail and raise a
        `DatasetTypeNotSupportedError`. The main use case for this is to
        allow `ChainedDatastore` to put to multiple datastores without
        requiring that every datastore accepts the dataset.
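
        Examples
        --------
        A sketch assuming a resolved ``ref`` whose `DatasetType` matches the
        object being stored (both objects are hypothetical):

        >>> datastore.put(inMemoryDataset, ref)
        >>> datastore.exists(ref)
        True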
        """

        if ref.id is None:
            raise RuntimeError(f"Can not store unresolved DatasetRef {ref}")

        self._validate_put_parameters(inMemoryDataset, ref)

        self.datasets[ref.id] = inMemoryDataset
        log.debug("Store %s in %s", ref, self.name)

        # Store time we received this content, to allow us to optionally
        # expire it. Instead of storing a filename here, we include the
        # ID of this datasetRef so we can find it from components.
        itemInfo = StoredMemoryItemInfo(time.time(), ref.datasetType.storageClass,
                                        parentID=ref.id)

        # We have to register this content with registry.
        # Currently this assumes we have a file so we need to use stub entries
        # TODO: Add to ephemeral part of registry
        self._register_datasets([(ref, itemInfo)])

        if self._transaction is not None:
            self._transaction.registerUndo("put", self.remove, ref)

    def getURIs(self, ref: DatasetRef,
                predict: bool = False) -> Tuple[Optional[ButlerURI], Dict[str, ButlerURI]]:
        """Return URIs associated with dataset.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required dataset.
        predict : `bool`, optional
            If the datastore does not know about the dataset, should it
            return a predicted URI or not?

        Returns
        -------
        primary : `ButlerURI`
            The URI to the primary artifact associated with this dataset.
            If the dataset was disassembled within the datastore this
            may be `None`.
        components : `dict`
            URIs to any components associated with the dataset artifact.
            Can be empty if there are no components.

        Notes
        -----
        The URIs returned for in-memory datastores are not usable but
        provide an indication of the associated dataset.
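
        Examples
        --------
        A sketch of the returned values for a stored ``ref`` (hypothetical;
        the ``mem://`` URI is indicative only and cannot be dereferenced):

        >>> primary, components = datastore.getURIs(ref)
        >>> primary.scheme
        'mem'
        >>> components
        {}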
        """

        # Include the dataId as a URI query
        query = urlencode(ref.dataId)

        # If this has never been written then we have to guess
        if not self.exists(ref):
            if not predict:
                raise FileNotFoundError("Dataset {} not in this datastore".format(ref))
            name = f"{ref.datasetType.name}"
            fragment = "#predicted"
        else:
            realID, _ = self._get_dataset_info(ref)
            # Use the id() of the stored object as the artifact "name".
            # The query string is appended once, below, for both branches.
            name = f"{id(self.datasets[realID])}"
            fragment = ""

        return ButlerURI(f"mem://{name}?{query}{fragment}"), {}

    def getURI(self, ref: DatasetRef, predict: bool = False) -> ButlerURI:
        """URI to the Dataset.

        Always uses "mem://" URI prefix.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required Dataset.
        predict : `bool`
            If `True`, allow URIs to be returned of datasets that have not
            been written.

        Returns
        -------
        uri : `str`
            URI pointing to the dataset within the datastore. If the
            dataset does not exist in the datastore, and if ``predict`` is
            `True`, the URI will be a prediction and will include a URI
            fragment "#predicted".
            If the datastore does not have entities that relate well
            to the concept of a URI the returned URI string will be
            descriptive. The returned URI is not guaranteed to be obtainable.

        Raises
        ------
        FileNotFoundError
            A URI has been requested for a dataset that does not exist and
            guessing is not allowed.
        AssertionError
            Raised if an internal error occurs.
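
        Examples
        --------
        A sketch for a dataset that has not yet been written, assuming a
        resolved ``ref`` (hypothetical):

        >>> uri = datastore.getURI(ref, predict=True)
        >>> uri.fragment
        'predicted'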
        """
        primary, _ = self.getURIs(ref, predict)
        if primary is None:
            # This should be impossible since this datastore does
            # not disassemble. This check also helps mypy.
            raise AssertionError(f"Unexpectedly got no URI for in-memory datastore for {ref}")
        return primary

    def trash(self, ref: DatasetRef, ignore_errors: bool = False) -> None:
        """Indicate to the Datastore that a dataset can be removed.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required Dataset.
        ignore_errors : `bool`, optional
            Indicate that errors should be ignored.

        Raises
        ------
        FileNotFoundError
            Attempt to remove a dataset that does not exist.

        Notes
        -----
        Concurrency should not normally be an issue for the in-memory
        datastore since all internal changes are isolated to solely this
        process and the registry only changes rows associated with this
        process.
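
        Examples
        --------
        A sketch of two-phase removal for a stored ``ref`` (hypothetical):
        `trash` only marks the dataset, and the artifact is deleted when the
        trash is emptied.

        >>> datastore.trash(ref)
        >>> datastore.emptyTrash()
        >>> datastore.exists(ref)
        False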
        """

        log.debug("Trash %s in datastore %s", ref, self.name)

        # Check that this dataset is known to datastore
        try:
            self._get_dataset_info(ref)

            # Move datasets to trash table
            self._move_to_trash_in_registry(ref)
        except Exception as e:
            if ignore_errors:
                log.warning("Error encountered moving dataset %s to trash in datastore %s: %s",
                            ref, self.name, e)
            else:
                raise

    def emptyTrash(self, ignore_errors: bool = False) -> None:
        """Remove all datasets from the trash.

        Parameters
        ----------
        ignore_errors : `bool`, optional
            Ignore errors.

        Notes
        -----
        The internal tracking of datasets is affected by this method and
        transaction handling is not supported if there is a problem before
        the datasets themselves are deleted.

        Concurrency should not normally be an issue for the in-memory
        datastore since all internal changes are isolated to solely this
        process and the registry only changes rows associated with this
        process.
        """
        log.debug("Emptying trash in datastore %s", self.name)
        with self._bridge.emptyTrash() as trashed:
            for ref in trashed:
                try:
                    realID, _ = self._get_dataset_info(ref)
                except Exception as e:
                    if ignore_errors:
                        log.warning("Emptying trash in datastore %s but encountered an "
                                    "error with dataset %s: %s",
                                    self.name, ref.id, e)
                        continue
                    else:
                        raise

                # Determine whether all references to this dataset have been
                # removed and we can delete the dataset itself
                allRefs = self.related[realID]
                remainingRefs = allRefs - {ref.id}
                if not remainingRefs:
                    log.debug("Removing artifact %s from datastore %s", realID, self.name)
                    del self.datasets[realID]

                # Remove this entry
                self.removeStoredItemInfo(ref)

    def validateConfiguration(self, entities: Iterable[Union[DatasetRef, DatasetType, StorageClass]],
                              logFailures: bool = False) -> None:
        """Validate some of the configuration for this datastore.

        Parameters
        ----------
        entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass`
            Entities to test against this configuration. Can be differing
            types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a validation problem with a configuration.
            All the problems are reported in a single exception.

        Notes
        -----
        This method is a no-op.
        """
        return

    def _overrideTransferMode(self, *datasets: Any, transfer: Optional[str] = None) -> Optional[str]:
        # Docstring is inherited from base class.
        return transfer

    def validateKey(self, lookupKey: LookupKey, entity: Union[DatasetRef, DatasetType, StorageClass]) -> None:
        # Docstring is inherited from base class.
        return

    def getLookupKeys(self) -> Set[LookupKey]:
        # Docstring is inherited from base class.
        return self.constraints.getLookupKeys()