Coverage for python/lsst/daf/butler/datastore/_datastore.py: 64%

274 statements  

coverage.py v7.4.1, created at 2024-02-01 11:20 +0000

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This software is dual licensed under the GNU General Public License and also 

10# under a 3-clause BSD license. Recipients may choose which of these licenses 

11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt, 

12# respectively. If you choose the GPL option then the following text applies 

13# (but note that there is still no warranty even if you opt for BSD instead): 

14# 

15# This program is free software: you can redistribute it and/or modify 

16# it under the terms of the GNU General Public License as published by 

17# the Free Software Foundation, either version 3 of the License, or 

18# (at your option) any later version. 

19# 

20# This program is distributed in the hope that it will be useful, 

21# but WITHOUT ANY WARRANTY; without even the implied warranty of 

22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

23# GNU General Public License for more details. 

24# 

25# You should have received a copy of the GNU General Public License 

26# along with this program. If not, see <http://www.gnu.org/licenses/>. 

27 

28"""Support for generic data stores.""" 

29 

30from __future__ import annotations 

31 

32__all__ = ( 

33 "DatasetRefURIs", 

34 "Datastore", 

35 "DatastoreConfig", 

36 "DatastoreOpaqueTable", 

37 "DatastoreValidationError", 

38 "NullDatastore", 

39 "DatastoreTransaction", 

40) 

41 

42import contextlib 

43import dataclasses 

44import logging 

45import time 

46from abc import ABCMeta, abstractmethod 

47from collections import abc, defaultdict 

48from collections.abc import Callable, Collection, Iterable, Iterator, Mapping 

49from typing import TYPE_CHECKING, Any, ClassVar 

50 

51from lsst.utils import doImportType 

52 

53from .._config import Config, ConfigSubset 

54from .._exceptions import DatasetTypeNotSupportedError, ValidationError 

55from .._file_dataset import FileDataset 

56from .._storage_class import StorageClassFactory 

57from .constraints import Constraints 

58 

59if TYPE_CHECKING: 

60 from lsst.resources import ResourcePath, ResourcePathExpression 

61 

62 from .. import ddl 

63 from .._config_support import LookupKey 

64 from .._dataset_ref import DatasetRef 

65 from .._dataset_type import DatasetType 

66 from .._storage_class import StorageClass 

67 from ..registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager 

68 from .record_data import DatastoreRecordData 

69 from .stored_file_info import StoredDatastoreItemInfo 

70 

71_LOG = logging.getLogger(__name__) 

72 

73 

74class DatastoreConfig(ConfigSubset): 

75 """Configuration for Datastores.""" 

76 

77 component = "datastore" 

78 requiredKeys = ("cls",) 

79 defaultConfigFile = "datastore.yaml" 

80 

81 

82class DatastoreValidationError(ValidationError): 

83 """There is a problem with the Datastore configuration.""" 

84 

85 pass 

86 

87 

88@dataclasses.dataclass(frozen=True) 

89class Event: 

90 """Representation of an event that can be rolled back.""" 

91 

92 __slots__ = {"name", "undoFunc", "args", "kwargs"} 

93 name: str 

94 undoFunc: Callable 

95 args: tuple 

96 kwargs: dict 

97 

98 
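# Illustrative sketch (editor addition, not part of the original source):
# an Event simply pairs a label with the callable and arguments needed to
# undo it. Instances are normally created via
# DatastoreTransaction.registerUndo rather than constructed directly.
def _demo_event() -> None:
    ev = Event("example", print, ("rolling back",), {})
    ev.undoFunc(*ev.args, **ev.kwargs)  # prints "rolling back"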

99@dataclasses.dataclass(frozen=True) 

100class DatastoreOpaqueTable: 

101 """Definition of the opaque table which stores datastore records. 

102 

103 The table definition holds a `.ddl.TableSpec` for the table and a

104 record class, which must be a subclass of `StoredDatastoreItemInfo`.

105 """ 

106 

107 __slots__ = {"table_spec", "record_class"} 

108 table_spec: ddl.TableSpec 

109 record_class: type[StoredDatastoreItemInfo] 

110 

111 

112class IngestPrepData: 

113 """A helper base class for `Datastore` ingest implementations. 

114 

115 Datastore implementations will generally need a custom implementation of 

116 this class. 

117 

118 Should be accessed as ``Datastore.IngestPrepData`` instead of via direct 

119 import. 

120 

121 Parameters 

122 ---------- 

123 refs : iterable of `DatasetRef` 

124 References for the datasets that can be ingested by this datastore. 

125 """ 

126 

127 def __init__(self, refs: Iterable[DatasetRef]): 

128 self.refs = {ref.id: ref for ref in refs} 

129 

130 
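# Illustrative sketch (editor addition, not part of the original source):
# IngestPrepData indexes the supplied refs by their ``id`` attribute.
# ``_FakeRef`` is a hypothetical stand-in for `DatasetRef`, used only to
# demonstrate the indexing without constructing a real dataset reference.
def _demo_ingest_prep_data() -> None:
    @dataclasses.dataclass
    class _FakeRef:
        id: int

    prep = IngestPrepData([_FakeRef(1), _FakeRef(2)])
    assert set(prep.refs) == {1, 2}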

131class DatastoreTransaction: 

132 """Keeps a log of `Datastore` activity and allow rollback. 

133 

134 Parameters 

135 ---------- 

136 parent : `DatastoreTransaction`, optional 

137 The parent transaction (if any). 

138 """ 

139 

140 Event: ClassVar[type] = Event 

141 

142 parent: DatastoreTransaction | None 

143 """The parent transaction. (`DatastoreTransaction`, optional)""" 

144 

145 def __init__(self, parent: DatastoreTransaction | None = None): 

146 self.parent = parent 

147 self._log: list[Event] = [] 

148 

149 def registerUndo(self, name: str, undoFunc: Callable, *args: Any, **kwargs: Any) -> None: 

150 """Register event with undo function. 

151 

152 Parameters 

153 ---------- 

154 name : `str` 

155 Name of the event. 

156 undoFunc : `~collections.abc.Callable` 

157 Function to undo this event. 

158 *args : `tuple` 

159 Positional arguments to ``undoFunc``. 

160 **kwargs 

161 Keyword arguments to ``undoFunc``. 

162 """ 

163 self._log.append(self.Event(name, undoFunc, args, kwargs)) 

164 

165 @contextlib.contextmanager 

166 def undoWith(self, name: str, undoFunc: Callable, *args: Any, **kwargs: Any) -> Iterator[None]: 

167 """Register undo function if nested operation succeeds. 

168 

169 Calls `registerUndo`. 

170 

171 This can be used to wrap individual undo-able statements within a 

172 DatastoreTransaction block. Multiple statements that can fail 

173 separately should not be part of the same `undoWith` block. 

174 

175 All arguments are forwarded directly to `registerUndo`. 

176 

177 Parameters 

178 ---------- 

179 name : `str` 

180 The name to associate with this event. 

181 undoFunc : `~collections.abc.Callable` 

182 Function to undo this event. 

183 *args : `tuple` 

184 Positional arguments for ``undoFunc``. 

185 **kwargs : `typing.Any` 

186 Keyword arguments for ``undoFunc``. 

187 """ 

188 try: 

189 yield None 

190 except BaseException: 

191 raise 

192 else: 

193 self.registerUndo(name, undoFunc, *args, **kwargs) 

194 

195 def rollback(self) -> None: 

196 """Roll back all events in this transaction.""" 

197 log = logging.getLogger(__name__) 

198 while self._log: 

199 ev = self._log.pop() 

200 try: 

201 log.debug( 

202 "Rolling back transaction: %s: %s(%s,%s)", 

203 ev.name, 

204 ev.undoFunc, 

205 ",".join(str(a) for a in ev.args), 

206 ",".join(f"{k}={v}" for k, v in ev.kwargs.items()), 

207 ) 

208 except Exception: 

209 # In case there was a problem stringifying the arguments.

210 log.warning("Rolling back transaction: %s", ev.name) 

211 try: 

212 ev.undoFunc(*ev.args, **ev.kwargs) 

213 except BaseException as e: 

214 # Deliberately swallow error that may occur in unrolling 

215 log.warning("Exception: %s caught while unrolling: %s", e, ev.name) 

216 pass 

217 

218 def commit(self) -> None: 

219 """Commit this transaction.""" 

220 if self.parent is None: 

221 # Just forget about the events, they have already happened. 

222 return 

223 else: 

224 # We may still want the events from this transaction to be part

225 # of the parent, so pass them up.

226 self.parent._log.extend(self._log) 

227 

228 
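# Illustrative sketch (editor addition): how DatastoreTransaction records
# undo actions and replays them in reverse order on rollback. The
# ``written`` list stands in for real datastore state.
def _demo_transaction_rollback() -> None:
    written: list[str] = []

    txn = DatastoreTransaction()
    written.append("artifact-1")
    txn.registerUndo("write artifact-1", written.remove, "artifact-1")
    with txn.undoWith("write artifact-2", written.remove, "artifact-2"):
        written.append("artifact-2")

    txn.rollback()  # undoes artifact-2 first, then artifact-1
    assert written == []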

229@dataclasses.dataclass 

230class DatasetRefURIs(abc.Sequence): 

231 """Represents the primary and component ResourcePath(s) associated with a 

232 DatasetRef. 

233 

234 This is used in places where its members used to be represented as a tuple 

235 `(primaryURI, componentURIs)`. To maintain backward compatibility this 

236 inherits from Sequence and so instances can be treated as a two-item 

237 tuple. 

238 

239 Parameters 

240 ---------- 

241 primaryURI : `lsst.resources.ResourcePath` or `None`, optional 

242 The URI to the primary artifact associated with this dataset. If the 

243 dataset was disassembled within the datastore this may be `None`. 

244 componentURIs : `dict` [`str`, `~lsst.resources.ResourcePath`] or `None` 

245 The URIs to any components associated with the dataset artifact 

246 indexed by component name. This can be empty if there are no 

247 components. 

248 """ 

249 

250 def __init__( 

251 self, 

252 primaryURI: ResourcePath | None = None, 

253 componentURIs: dict[str, ResourcePath] | None = None, 

254 ): 

255 self.primaryURI = primaryURI 

256 self.componentURIs = componentURIs or {} 

257 

258 def __getitem__(self, index: Any) -> Any: 

259 """Get primaryURI and componentURIs by index. 

260 

261 Provides support for tuple-like access. 

262 """ 

263 if index == 0: 

264 return self.primaryURI 

265 elif index == 1: 

266 return self.componentURIs 

267 raise IndexError("list index out of range") 

268 

269 def __len__(self) -> int: 

270 """Get the number of data members. 

271 

272 Provides support for tuple-like access. 

273 """ 

274 return 2 

275 

276 def __repr__(self) -> str: 

277 return f"DatasetRefURIs({repr(self.primaryURI)}, {repr(self.componentURIs)})" 

278 

279 
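# Illustrative sketch (editor addition): DatasetRefURIs behaves like the
# two-item tuple ``(primaryURI, componentURIs)`` it replaced, so existing
# tuple-unpacking call sites keep working.
def _demo_dataset_ref_uris() -> None:
    uris = DatasetRefURIs(primaryURI=None, componentURIs={})
    primary, components = uris  # tuple-style unpacking via the Sequence API
    assert primary is uris.primaryURI
    assert components == {} and len(uris) == 2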

280class Datastore(metaclass=ABCMeta): 

281 """Datastore interface. 

282 

283 Parameters 

284 ---------- 

285 config : `DatastoreConfig` or `str` 

286 Load configuration either from an existing config instance or by 

287 referring to a configuration file. 

288 bridgeManager : `DatastoreRegistryBridgeManager` 

289 Object that manages the interface between `Registry` and datastores. 

290 """ 

291 

292 defaultConfigFile: ClassVar[str | None] = None 

293 """Path to configuration defaults. Accessed within the ``config`` resource 

294 or relative to a search path. Can be `None` if no defaults are specified.

295 """ 

296 

297 containerKey: ClassVar[str | None] = None 

298 """Name of the key containing a list of subconfigurations that also 

299 need to be merged with defaults and will likely use different Python 

300 datastore classes (but all using DatastoreConfig). Assumed to be a 

301 list of configurations that can be represented in a DatastoreConfig 

302 and containing a "cls" definition. None indicates that no containers 

303 are expected in this Datastore.""" 

304 

305 isEphemeral: bool = False 

306 """Indicate whether this Datastore is ephemeral or not. An ephemeral 

307 datastore is one where the contents of the datastore will not exist 

308 across process restarts. This value can change per-instance.""" 

309 

310 config: DatastoreConfig 

311 """Configuration used to create Datastore.""" 

312 

313 name: str 

314 """Label associated with this Datastore.""" 

315 

316 storageClassFactory: StorageClassFactory 

317 """Factory for creating storage class instances from name.""" 

318 

319 constraints: Constraints 

320 """Constraints to apply when putting datasets into the datastore.""" 

321 

322 # MyPy does not like for this to be annotated as any kind of type, because 

323 # it can't do static checking on type variables that can change at runtime. 

324 IngestPrepData: ClassVar[Any] = IngestPrepData 

325 """Helper base class for ingest implementations. 

326 """ 

327 

328 @classmethod 

329 @abstractmethod 

330 def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None: 

331 """Set filesystem-dependent config options for this datastore. 

332 

333 The options will be appropriate for a new empty repository with the 

334 given root. 

335 

336 Parameters 

337 ---------- 

338 root : `str` 

339 Filesystem path to the root of the data repository. 

340 config : `Config` 

341 A `Config` to update. Only the subset understood by 

342 this component will be updated. Will not expand 

343 defaults. 

344 full : `Config` 

345 A complete config with all defaults expanded that can be 

346 converted to a `DatastoreConfig`. Read-only and will not be 

347 modified by this method. 

348 Repository-specific options that should not be obtained 

349 from defaults when Butler instances are constructed 

350 should be copied from ``full`` to ``config``. 

351 overwrite : `bool`, optional 

352 If `False`, do not modify a value in ``config`` if the value 

353 already exists. Default is always to overwrite with the provided 

354 ``root``. 

355 

356 Notes 

357 ----- 

358 If a keyword is explicitly defined in the supplied ``config`` it 

359 will not be overridden by this method if ``overwrite`` is `False`. 

360 This allows explicit values set in external configs to be retained. 

361 """ 

362 raise NotImplementedError() 

363 

364 @staticmethod 

365 def fromConfig( 

366 config: Config, 

367 bridgeManager: DatastoreRegistryBridgeManager, 

368 butlerRoot: ResourcePathExpression | None = None, 

369 ) -> Datastore: 

370 """Create datastore from type specified in config file. 

371 

372 Parameters 

373 ---------- 

374 config : `Config` or `~lsst.resources.ResourcePathExpression` 

375 Configuration instance. 

376 bridgeManager : `DatastoreRegistryBridgeManager` 

377 Object that manages the interface between `Registry` and 

378 datastores. 

379 butlerRoot : `~lsst.resources.ResourcePathExpression`, optional

380 Butler root directory. 

381 """ 

382 config = DatastoreConfig(config) 

383 cls = doImportType(config["cls"]) 

384 if not issubclass(cls, Datastore): 

385 raise TypeError(f"Imported child class {config['cls']} is not a Datastore") 

386 return cls._create_from_config(config=config, bridgeManager=bridgeManager, butlerRoot=butlerRoot) 

387 

388 def __init__( 

389 self, 

390 config: DatastoreConfig, 

391 bridgeManager: DatastoreRegistryBridgeManager, 

392 ): 

393 self.config = config 

394 self.name = "ABCDataStore" 

395 self._transaction: DatastoreTransaction | None = None 

396 

397 # All Datastores need storage classes and constraints 

398 self.storageClassFactory = StorageClassFactory() 

399 

400 # And read the constraints list 

401 constraintsConfig = self.config.get("constraints") 

402 self.constraints = Constraints(constraintsConfig, universe=bridgeManager.universe) 

403 

404 @classmethod 

405 @abstractmethod 

406 def _create_from_config( 

407 cls, 

408 config: DatastoreConfig, 

409 bridgeManager: DatastoreRegistryBridgeManager, 

410 butlerRoot: ResourcePathExpression | None, 

411 ) -> Datastore: 

412 """`Datastore`.``fromConfig`` calls this to instantiate Datastore 

413 subclasses. This is the primary constructor for the individual 

414 Datastore subclasses. 

415 """ 

416 raise NotImplementedError() 

417 

418 @abstractmethod 

419 def clone(self, bridgeManager: DatastoreRegistryBridgeManager) -> Datastore: 

420 """Make an independent copy of this Datastore with a different 

421 `DatastoreRegistryBridgeManager` instance. 

422 

423 Parameters 

424 ---------- 

425 bridgeManager : `DatastoreRegistryBridgeManager` 

426 New `DatastoreRegistryBridgeManager` object to use when 

427 instantiating managers. 

428 

429 Returns 

430 ------- 

431 datastore : `Datastore` 

432 New `Datastore` instance with the same configuration as the 

433 existing instance. 

434 """ 

435 raise NotImplementedError() 

436 

437 def __str__(self) -> str: 

438 return self.name 

439 

440 def __repr__(self) -> str: 

441 return self.name 

442 

443 @property 

444 def names(self) -> tuple[str, ...]: 

445 """Names associated with this datastore returned as a list. 

446 

447 Can be different to ``name`` for a chaining datastore. 

448 """ 

449 # Default implementation returns solely the name itself 

450 return (self.name,) 

451 

452 @property 

453 def roots(self) -> dict[str, ResourcePath | None]: 

454 """Return the root URIs for each named datastore. 

455 

456 Mapping from datastore name to root URI. The URI can be `None` 

457 if a datastore has no concept of a root URI. 

458 (`dict` [`str`, `ResourcePath` | `None`]) 

459 """ 

460 return {self.name: None} 

461 

462 @contextlib.contextmanager 

463 def transaction(self) -> Iterator[DatastoreTransaction]: 

464 """Context manager supporting `Datastore` transactions. 

465 

466 Transactions can be nested, and are to be used in combination with 

467 `Registry.transaction`. 

468 """ 

469 self._transaction = DatastoreTransaction(self._transaction) 

470 try: 

471 yield self._transaction 

472 except BaseException: 

473 self._transaction.rollback() 

474 raise 

475 else: 

476 self._transaction.commit() 

477 self._transaction = self._transaction.parent 

478 
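    # Illustrative usage sketch (editor addition; ``datastore``, ``obj``
    # and ``ref`` are hypothetical):
    #
    #     with datastore.transaction():
    #         datastore.put(obj, ref)
    #
    # If an exception escapes the block the transaction is rolled back and
    # re-raised; otherwise its events are committed into the parent
    # transaction, if one exists.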

479 def _set_trust_mode(self, mode: bool) -> None: 

480 """Set the trust mode for this datastore. 

481 

482 Parameters 

483 ---------- 

484 mode : `bool` 

485 If `True`, get requests will be attempted even if the datastore 

486 does not know about the dataset. 

487 

488 Notes 

489 ----- 

490 This is a private method to indicate that trust mode might be a 

491 transitory property that we do not want to make fully public. For now 

492 only a `~lsst.daf.butler.datastores.FileDatastore` understands this 

493 concept. By default this method does nothing. 

494 """ 

495 return 

496 

497 @abstractmethod 

498 def knows(self, ref: DatasetRef) -> bool: 

499 """Check if the dataset is known to the datastore. 

500 

501 Does not check for existence of any artifact. 

502 

503 Parameters 

504 ---------- 

505 ref : `DatasetRef` 

506 Reference to the required dataset. 

507 

508 Returns 

509 ------- 

510 exists : `bool` 

511 `True` if the dataset is known to the datastore. 

512 """ 

513 raise NotImplementedError() 

514 

515 def knows_these(self, refs: Iterable[DatasetRef]) -> dict[DatasetRef, bool]: 

516 """Check which of the given datasets are known to this datastore. 

517 

518 This is like ``mexists()`` but does not check that the file exists.

519 

520 Parameters 

521 ---------- 

522 refs : iterable of `DatasetRef`

523 The datasets to check. 

524 

525 Returns 

526 ------- 

527 exists : `dict`[`DatasetRef`, `bool`] 

528 Mapping of dataset to boolean indicating whether the dataset 

529 is known to the datastore. 

530 """ 

531 # Non-optimized default calls knows() repeatedly. 

532 return {ref: self.knows(ref) for ref in refs} 

533 

534 def mexists( 

535 self, refs: Iterable[DatasetRef], artifact_existence: dict[ResourcePath, bool] | None = None 

536 ) -> dict[DatasetRef, bool]: 

537 """Check the existence of multiple datasets at once. 

538 

539 Parameters 

540 ---------- 

541 refs : iterable of `DatasetRef` 

542 The datasets to be checked. 

543 artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`] 

544 Optional mapping of datastore artifact to existence. Updated by 

545 this method with details of all artifacts tested. Can be `None` 

546 if the caller is not interested. 

547 

548 Returns 

549 ------- 

550 existence : `dict` [`DatasetRef`, `bool`]

551 Mapping from dataset to boolean indicating existence. 

552 """ 

553 existence: dict[DatasetRef, bool] = {} 

554 # Non-optimized default. 

555 for ref in refs: 

556 existence[ref] = self.exists(ref) 

557 return existence 

558 

559 @abstractmethod 

560 def exists(self, datasetRef: DatasetRef) -> bool: 

561 """Check if the dataset exists in the datastore. 

562 

563 Parameters 

564 ---------- 

565 datasetRef : `DatasetRef` 

566 Reference to the required dataset. 

567 

568 Returns 

569 ------- 

570 exists : `bool` 

571 `True` if the entity exists in the `Datastore`. 

572 """ 

573 raise NotImplementedError("Must be implemented by subclass") 

574 

575 @abstractmethod 

576 def get( 

577 self, 

578 datasetRef: DatasetRef, 

579 parameters: Mapping[str, Any] | None = None, 

580 storageClass: StorageClass | str | None = None, 

581 ) -> Any: 

582 """Load an `InMemoryDataset` from the store. 

583 

584 Parameters 

585 ---------- 

586 datasetRef : `DatasetRef` 

587 Reference to the required Dataset. 

588 parameters : `dict` 

589 `StorageClass`-specific parameters that specify a slice of the 

590 Dataset to be loaded. 

591 storageClass : `StorageClass` or `str`, optional 

592 The storage class to be used to override the Python type 

593 returned by this method. By default the returned type matches 

594 the dataset type definition for this dataset. Specifying a 

595 read `StorageClass` can force a different type to be returned. 

596 This type must be compatible with the original type. 

597 

598 Returns 

599 ------- 

600 inMemoryDataset : `object` 

601 Requested Dataset or slice thereof as an InMemoryDataset. 

602 """ 

603 raise NotImplementedError("Must be implemented by subclass") 

604 

605 def prepare_get_for_external_client(self, ref: DatasetRef) -> object: 

606 """Retrieve serializable data that can be used to execute a ``get()``. 

607 

608 Parameters 

609 ---------- 

610 ref : `DatasetRef` 

611 Reference to the required dataset. 

612 

613 Returns 

614 ------- 

615 payload : `object` 

616 Serializable payload containing the information needed to perform a 

617 get() operation. This payload may be sent over the wire to another 

618 system to perform the get(). 

619 """ 

620 raise NotImplementedError() 

621 

622 @abstractmethod 

623 def put(self, inMemoryDataset: Any, datasetRef: DatasetRef) -> None: 

624 """Write a `InMemoryDataset` with a given `DatasetRef` to the store. 

625 

626 Parameters 

627 ---------- 

628 inMemoryDataset : `object` 

629 The Dataset to store. 

630 datasetRef : `DatasetRef` 

631 Reference to the associated Dataset. 

632 """ 

633 raise NotImplementedError("Must be implemented by subclass") 

634 

635 @abstractmethod 

636 def put_new(self, in_memory_dataset: Any, ref: DatasetRef) -> Mapping[str, DatasetRef]: 

637 """Write a `InMemoryDataset` with a given `DatasetRef` to the store. 

638 

639 Parameters 

640 ---------- 

641 in_memory_dataset : `object` 

642 The Dataset to store. 

643 ref : `DatasetRef` 

644 Reference to the associated Dataset. 

645 

646 Returns 

647 ------- 

648 datastore_refs : `~collections.abc.Mapping` [`str`, `DatasetRef`] 

649 Mapping of datastore name to the dataset reference stored in

650 that datastore; the reference will include datastore records.

651 Only non-ephemeral datastores will appear in this mapping.

652 """ 

653 raise NotImplementedError("Must be implemented by subclass") 

654 

655 def _overrideTransferMode(self, *datasets: FileDataset, transfer: str | None = None) -> str | None: 

656 """Allow ingest transfer mode to be defaulted based on datasets. 

657 

658 Parameters 

659 ---------- 

660 *datasets : `FileDataset` 

661 Each positional argument is a struct containing information about 

662 a file to be ingested, including its path (either absolute or 

663 relative to the datastore root, if applicable), a complete 

664 `DatasetRef` (with ``dataset_id not None``), and optionally a 

665 formatter class or its fully-qualified string name. If a formatter 

666 is not provided, this method should populate that attribute with 

667 the formatter the datastore would use for `put`. Subclasses are 

668 also permitted to modify the path attribute (typically to put it 

669 in what the datastore considers its standard form). 

670 transfer : `str`, optional 

671 How (and whether) the dataset should be added to the datastore. 

672 See `ingest` for details of transfer modes. 

673 

674 Returns 

675 ------- 

676 newTransfer : `str` 

677 Transfer mode to use. Will be identical to the supplied transfer 

678 mode unless "auto" is used. 

679 """ 

680 if transfer != "auto": 

681 return transfer 

682 raise RuntimeError(f"{transfer} is not allowed without specialization.") 

683 

684 def _prepIngest(self, *datasets: FileDataset, transfer: str | None = None) -> IngestPrepData: 

685 """Process datasets to identify which ones can be ingested. 

686 

687 Parameters 

688 ---------- 

689 *datasets : `FileDataset` 

690 Each positional argument is a struct containing information about 

691 a file to be ingested, including its path (either absolute or 

692 relative to the datastore root, if applicable), a complete 

693 `DatasetRef` (with ``dataset_id not None``), and optionally a 

694 formatter class or its fully-qualified string name. If a formatter 

695 is not provided, this method should populate that attribute with 

696 the formatter the datastore would use for `put`. Subclasses are 

697 also permitted to modify the path attribute (typically to put it 

698 in what the datastore considers its standard form). 

699 transfer : `str`, optional 

700 How (and whether) the dataset should be added to the datastore. 

701 See `ingest` for details of transfer modes. 

702 

703 Returns 

704 ------- 

705 data : `IngestPrepData` 

706 An instance of a subclass of `IngestPrepData`, used to pass 

707 arbitrary data from `_prepIngest` to `_finishIngest`. This should 

708 include only the datasets this datastore can actually ingest; 

709 others should be silently ignored (`Datastore.ingest` will inspect 

710 `IngestPrepData.refs` and raise `DatasetTypeNotSupportedError` if 

711 necessary). 

712 

713 Raises 

714 ------ 

715 NotImplementedError 

716 Raised if the datastore does not support the given transfer mode 

717 (including the case where ingest is not supported at all). 

718 FileNotFoundError 

719 Raised if one of the given files does not exist. 

720 FileExistsError 

721 Raised if transfer is not `None` but the (internal) location the 

722 file would be moved to is already occupied. 

723 

724 Notes 

725 ----- 

726 This method (along with `_finishIngest`) should be implemented by 

727 subclasses to provide ingest support instead of implementing `ingest` 

728 directly. 

729 

730 `_prepIngest` should not modify the data repository or given files in 

731 any way; all changes should be deferred to `_finishIngest`. 

732 

733 When possible, exceptions should be raised in `_prepIngest` instead of 

734 `_finishIngest`. `NotImplementedError` exceptions that indicate that 

735 the transfer mode is not supported must be raised by `_prepIngest` 

736 instead of `_finishIngest`. 

737 """ 

738 raise NotImplementedError(f"Datastore {self} does not support direct file-based ingest.") 

739 

740 def _finishIngest( 

741 self, prepData: IngestPrepData, *, transfer: str | None = None, record_validation_info: bool = True 

742 ) -> None: 

743 """Complete an ingest operation. 

744 

745 Parameters 

746 ---------- 

747 prepData : `IngestPrepData` 

748 An instance of a subclass of `IngestPrepData`. Guaranteed to be 

749 the direct result of a call to `_prepIngest` on this datastore. 

750 transfer : `str`, optional 

751 How (and whether) the dataset should be added to the datastore. 

752 See `ingest` for details of transfer modes. 

753 record_validation_info : `bool`, optional 

754 If `True`, the default, the datastore can record validation 

755 information associated with the file. If `False` the datastore 

756 will not attempt to track any information such as checksums 

757 or file sizes. This can be useful if such information is tracked 

758 in an external system or if the file is to be compressed in place. 

759 It is up to the datastore whether this parameter is relevant. 

760 

761 Raises 

762 ------ 

763 FileNotFoundError 

764 Raised if one of the given files does not exist. 

765 FileExistsError 

766 Raised if transfer is not `None` but the (internal) location the 

767 file would be moved to is already occupied. 

768 

769 Notes 

770 ----- 

771 This method (along with `_prepIngest`) should be implemented by 

772 subclasses to provide ingest support instead of implementing `ingest` 

773 directly. 

774 """ 

775 raise NotImplementedError(f"Datastore {self} does not support direct file-based ingest.") 

776 

777 def ingest( 

778 self, *datasets: FileDataset, transfer: str | None = None, record_validation_info: bool = True 

779 ) -> None: 

780 """Ingest one or more files into the datastore. 

781 

782 Parameters 

783 ---------- 

784 *datasets : `FileDataset` 

785 Each positional argument is a struct containing information about 

786 a file to be ingested, including its path (either absolute or 

787 relative to the datastore root, if applicable), a complete 

788 `DatasetRef` (with ``dataset_id not None``), and optionally a 

789 formatter class or its fully-qualified string name. If a formatter 

790 is not provided, the one the datastore would use for ``put`` on 

791 that dataset is assumed. 

792 transfer : `str`, optional 

793 How (and whether) the dataset should be added to the datastore. 

794 If `None` (default), the file must already be in a location 

795 appropriate for the datastore (e.g. within its root directory), 

796 and will not be modified. Other choices include "move", "copy", 

797 "link", "symlink", "relsymlink", and "hardlink". "link" is a 

798 special transfer mode that will first try to make a hardlink and 

799 if that fails a symlink will be used instead. "relsymlink" creates 

800 a relative symlink rather than using an absolute path.

801 Most datastores do not support all transfer modes. 

802 "auto" is a special option that will let the 

803 data store choose the most natural option for itself. 

804 record_validation_info : `bool`, optional 

805 If `True`, the default, the datastore can record validation 

806 information associated with the file. If `False` the datastore 

807 will not attempt to track any information such as checksums 

808 or file sizes. This can be useful if such information is tracked 

809 in an external system or if the file is to be compressed in place. 

810 It is up to the datastore whether this parameter is relevant. 

811 

812 Raises 

813 ------ 

814 NotImplementedError 

815 Raised if the datastore does not support the given transfer mode 

816 (including the case where ingest is not supported at all). 

817 DatasetTypeNotSupportedError 

818 Raised if one or more files to be ingested have a dataset type that 

819 is not supported by the datastore. 

820 FileNotFoundError 

821 Raised if one of the given files does not exist. 

822 FileExistsError 

823 Raised if transfer is not `None` but the (internal) location the 

824 file would be moved to is already occupied. 

825 

826 Notes 

827 ----- 

828 Subclasses should implement `_prepIngest` and `_finishIngest` instead 

829 of implementing `ingest` directly. Datastores that hold and 

830 delegate to child datastores may want to call those methods as well. 

831 

832 Subclasses are encouraged to document their supported transfer modes 

833 in their class documentation. 

834 """ 

835 # Allow a datastore to select a default transfer mode 

836 transfer = self._overrideTransferMode(*datasets, transfer=transfer) 

837 prepData = self._prepIngest(*datasets, transfer=transfer) 

838 refs = {ref.id: ref for dataset in datasets for ref in dataset.refs} 

839 if refs.keys() != prepData.refs.keys(): 

840 unsupported = refs.keys() - prepData.refs.keys() 

841 # Group unsupported refs by DatasetType for an informative 

842 # but still concise error message. 

843 byDatasetType = defaultdict(list) 

844 for datasetId in unsupported: 

845 ref = refs[datasetId] 

846 byDatasetType[ref.datasetType].append(ref) 

847 raise DatasetTypeNotSupportedError( 

848 "DatasetType(s) not supported in ingest: " 

849 + ", ".join(f"{k.name} ({len(v)} dataset(s))" for k, v in byDatasetType.items()) 

850 ) 

851 self._finishIngest(prepData, transfer=transfer, record_validation_info=record_validation_info) 

852 
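    # Illustrative usage sketch (editor addition; the path and ``ref``
    # values are hypothetical):
    #
    #     datastore.ingest(
    #         FileDataset(path="/data/raw_001.fits", refs=[ref]),
    #         transfer="copy",
    #     )
    #
    # Datasets the datastore cannot accept surface as a
    # DatasetTypeNotSupportedError, grouped by dataset type as above.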

853 def transfer_from( 

854 self, 

855 source_datastore: Datastore, 

856 refs: Collection[DatasetRef], 

857 transfer: str = "auto", 

858 artifact_existence: dict[ResourcePath, bool] | None = None, 

859 dry_run: bool = False, 

860 ) -> tuple[set[DatasetRef], set[DatasetRef]]: 

861 """Transfer dataset artifacts from another datastore to this one. 

862 

863 Parameters 

864 ---------- 

865 source_datastore : `Datastore` 

866 The datastore from which to transfer artifacts. That datastore 

867 must be compatible with this datastore receiving the artifacts. 

868 refs : `~collections.abc.Collection` of `DatasetRef` 

869 The datasets to transfer from the source datastore. 

870 transfer : `str`, optional 

871 How (and whether) the dataset should be added to the datastore. 

872 Choices include "move", "copy", 

873 "link", "symlink", "relsymlink", and "hardlink". "link" is a 

874 special transfer mode that will first try to make a hardlink and 

875 if that fails a symlink will be used instead. "relsymlink" creates 

876 a relative symlink rather than using an absolute path.

877 Most datastores do not support all transfer modes. 

878 "auto" (the default) is a special option that will let the 

879 data store choose the most natural option for itself. 

880 If the source location and transfer location are identical the 

881 transfer mode will be ignored. 

882 artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`] 

883 Optional mapping of datastore artifact to existence. Updated by 

884 this method with details of all artifacts tested. Can be `None` 

885 if the caller is not interested. 

886 dry_run : `bool`, optional 

887 Process the supplied source refs without updating the target 

888 datastore. 

889 

890 Returns 

891 ------- 

892 accepted : `set` [`DatasetRef`] 

893 The datasets that were transferred. 

894 rejected : `set` [`DatasetRef`] 

895 The datasets that were rejected due to a constraints violation. 

896 

897 Raises 

898 ------ 

899 TypeError 

900 Raised if the two datastores are not compatible. 

901 """ 

902 if type(self) is not type(source_datastore): 

903 raise TypeError( 

904 f"Datastore mismatch between this datastore ({type(self)}) and the " 

905 f"source datastore ({type(source_datastore)})." 

906 ) 

907 

908 raise NotImplementedError(f"Datastore {type(self)} must implement a transfer_from method.") 

909 

910 def getManyURIs( 

911 self, 

912 refs: Iterable[DatasetRef], 

913 predict: bool = False, 

914 allow_missing: bool = False, 

915 ) -> dict[DatasetRef, DatasetRefURIs]: 

916 """Return URIs associated with many datasets. 

917 

918 Parameters 

919 ---------- 

920 refs : iterable of `DatasetRef`

921 References to the required datasets. 

922 predict : `bool`, optional 

923 If `True`, allow URIs to be returned of datasets that have not 

924 been written. 

925 allow_missing : `bool` 

926 If `False`, and ``predict`` is `False`, will raise if a 

927 `DatasetRef` does not exist. 

928 

929 Returns 

930 ------- 

931 URIs : `dict` [`DatasetRef`, `DatasetRefURIs`]

932 A dict of primary and component URIs, indexed by the passed-in 

933 refs. 

934 

935 Raises 

936 ------ 

937 FileNotFoundError 

938 A URI has been requested for a dataset that does not exist and 

939 guessing is not allowed. 

940 

941 Notes 

942 ----- 

943 In file-based datastores, getManyURIs does not check that the file

944 really exists; it assumes that if the datastore is aware of the

945 file then it does exist.

946 """ 

947 uris: dict[DatasetRef, DatasetRefURIs] = {} 

948 missing_refs = [] 

949 for ref in refs: 

950 try: 

951 uris[ref] = self.getURIs(ref, predict=predict) 

952 except FileNotFoundError: 

953 missing_refs.append(ref) 

954 if missing_refs and not allow_missing: 

955 raise FileNotFoundError( 

956 "Missing {} refs from datastore out of {} and predict=False.".format( 

957 num_missing := len(missing_refs), num_missing + len(uris) 

958 ) 

959 ) 

960 return uris 

961 

962 @abstractmethod 

963 def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs: 

964 """Return URIs associated with dataset. 

965 

966 Parameters 

967 ---------- 

968 datasetRef : `DatasetRef` 

969 Reference to the required dataset. 

970 predict : `bool`, optional 

971 If the datastore does not know about the dataset, controls whether 

972 it should return a predicted URI or not. 

973 

974 Returns 

975 ------- 

976 uris : `DatasetRefURIs` 

977 The URI to the primary artifact associated with this dataset (if 

978 the dataset was disassembled within the datastore this may be 

979 `None`), and the URIs to any components associated with the dataset 

980 artifact (this mapping can be empty if there are no components).

981 """ 

982 raise NotImplementedError() 

983 

984 @abstractmethod 

985 def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath: 

986 """URI to the Dataset. 

987 

988 Parameters 

989 ---------- 

990 datasetRef : `DatasetRef` 

991 Reference to the required Dataset. 

992 predict : `bool` 

993 If `True` attempt to predict the URI for a dataset if it does 

994 not exist in datastore. 

995 

996 Returns 

997 ------- 

998 uri : `lsst.resources.ResourcePath`

999 URI pointing to the Dataset within the datastore. If the

1000 Dataset does not exist in the datastore, the URI may be a guess. 

1001 If the datastore does not have entities that relate well 

1002 to the concept of a URI the returned URI string will be 

1003 descriptive. The returned URI is not guaranteed to be obtainable. 

1004 

1005 Raises 

1006 ------ 

1007 FileNotFoundError 

1008 A URI has been requested for a dataset that does not exist and 

1009 guessing is not allowed. 

1010 """ 

1011 raise NotImplementedError("Must be implemented by subclass") 

1012 

1013 @abstractmethod 

1014 def retrieveArtifacts( 

1015 self, 

1016 refs: Iterable[DatasetRef], 

1017 destination: ResourcePath, 

1018 transfer: str = "auto", 

1019 preserve_path: bool = True, 

1020 overwrite: bool = False, 

1021 ) -> list[ResourcePath]: 

1022 """Retrieve the artifacts associated with the supplied refs. 

1023 

1024 Parameters 

1025 ---------- 

1026 refs : iterable of `DatasetRef` 

1027 The datasets for which artifacts are to be retrieved. 

1028 A single ref can result in multiple artifacts. The refs must 

1029 be resolved. 

1030 destination : `lsst.resources.ResourcePath` 

1031 Location to write the artifacts. 

1032 transfer : `str`, optional 

1033 Method to use to transfer the artifacts. Must be one of the options 

1034 supported by `lsst.resources.ResourcePath.transfer_from()`. 

1035 "move" is not allowed. 

1036 preserve_path : `bool`, optional 

1037 If `True` the full path of the artifact within the datastore 

1038 is preserved. If `False` the final file component of the path 

1039 is used. 

1040 overwrite : `bool`, optional 

1041 If `True` allow transfers to overwrite existing files at the 

1042 destination. 

1043 

1044 Returns 

1045 ------- 

1046 targets : `list` of `lsst.resources.ResourcePath` 

1047 URIs of file artifacts in destination location. Order is not 

1048 preserved. 

1049 

1050 Notes 

1051 ----- 

1052 For non-file datastores the artifacts written to the destination 

1053 may not match the representation inside the datastore. For example 

1054 a hierarchical data structure in a NoSQL database may well be stored

1055 as a JSON file. 

1056 """ 

1057 raise NotImplementedError() 

1058 

1059 @abstractmethod 

1060 def remove(self, datasetRef: DatasetRef) -> None: 

1061 """Indicate to the Datastore that a Dataset can be removed. 

1062 

1063 Parameters 

1064 ---------- 

1065 datasetRef : `DatasetRef` 

1066 Reference to the required Dataset. 

1067 

1068 Raises 

1069 ------ 

1070 FileNotFoundError 

1071 When Dataset does not exist. 

1072 

1073 Notes 

1074 ----- 

1075 Some Datastores may implement this method as a silent no-op to 

1076 disable Dataset deletion through standard interfaces. 

1077 """ 

1078 raise NotImplementedError("Must be implemented by subclass") 

1079 

1080 @abstractmethod 

1081 def forget(self, refs: Iterable[DatasetRef]) -> None: 

1082 """Indicate to the Datastore that it should remove all records of the 

1083 given datasets, without actually deleting them. 

1084 

1085 Parameters 

1086 ---------- 

1087 refs : `~collections.abc.Iterable` [ `DatasetRef` ] 

1088 References to the datasets being forgotten. 

1089 

1090 Notes 

1091 ----- 

1092 Asking a datastore to forget a `DatasetRef` it does not hold should be 

1093 a silent no-op, not an error. 

1094 """ 

1095 raise NotImplementedError("Must be implemented by subclass") 

1096 

1097 @abstractmethod 

1098 def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None: 

1099 """Indicate to the Datastore that a Dataset can be moved to the trash. 

1100 

1101 Parameters 

1102 ---------- 

1103 ref : `DatasetRef` or iterable thereof 

1104 Reference(s) to the required Dataset. 

1105 ignore_errors : `bool`, optional 

1106 Determine whether errors should be ignored. When multiple 

1107 refs are being trashed there will be no per-ref check. 

1108 

1109 Raises 

1110 ------ 

1111 FileNotFoundError 

1112 When Dataset does not exist and errors are not ignored. Only 

1113 checked if a single ref is supplied (and not in a list). 

1114 

1115 Notes 

1116 ----- 

1117 Some Datastores may implement this method as a silent no-op to 

1118 disable Dataset deletion through standard interfaces. 

1119 """ 

1120 raise NotImplementedError("Must be implemented by subclass") 

1121 

1122 @abstractmethod 

1123 def emptyTrash(self, ignore_errors: bool = True) -> None: 

1124 """Remove all datasets from the trash. 

1125 

1126 Parameters 

1127 ---------- 

1128 ignore_errors : `bool`, optional 

1129 Determine whether errors should be ignored. 

1130 

1131 Notes 

1132 ----- 

1133 Some Datastores may implement this method as a silent no-op to 

1134 disable Dataset deletion through standard interfaces. 

1135 """ 

1136 raise NotImplementedError("Must be implemented by subclass") 

1137 

1138 @abstractmethod 

1139 def transfer(self, inputDatastore: Datastore, datasetRef: DatasetRef) -> None: 

1140 """Transfer a dataset from another datastore to this datastore. 

1141 

1142 Parameters 

1143 ---------- 

1144 inputDatastore : `Datastore` 

1145 The external `Datastore` from which to retrieve the Dataset. 

1146 datasetRef : `DatasetRef` 

1147 Reference to the required Dataset. 

1148 """ 

1149 raise NotImplementedError("Must be implemented by subclass") 

1150 

1151 def export( 

1152 self, 

1153 refs: Iterable[DatasetRef], 

1154 *, 

1155 directory: ResourcePathExpression | None = None, 

1156 transfer: str | None = "auto", 

1157 ) -> Iterable[FileDataset]: 

1158 """Export datasets for transfer to another data repository. 

1159 

1160 Parameters 

1161 ---------- 

1162 refs : iterable of `DatasetRef` 

1163 Dataset references to be exported. 

1164 directory : `str`, optional 

1165 Path to a directory that should contain files corresponding to 

1166 output datasets. Ignored if ``transfer`` is explicitly `None`. 

1167 transfer : `str`, optional 

1168 Mode that should be used to move datasets out of the repository. 

1169 Valid options are the same as those of the ``transfer`` argument 

1170 to ``ingest``, and datastores may similarly signal that a transfer 

1171 mode is not supported by raising `NotImplementedError`. If "auto" 

1172 is given and no ``directory`` is specified, `None` will be 

1173 implied. 

1174 

1175 Returns 

1176 ------- 

1177 datasets : iterable of `FileDataset`

1178 Structs containing information about the exported datasets, in the 

1179 same order as ``refs``. 

1180 

1181 Raises 

1182 ------ 

1183 NotImplementedError 

1184 Raised if the given transfer mode is not supported. 

1185 """ 

1186 raise NotImplementedError(f"Transfer mode {transfer} not supported.") 

1187 

1188 @abstractmethod 

1189 def validateConfiguration( 

1190 self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False 

1191 ) -> None: 

1192 """Validate some of the configuration for this datastore. 

1193 

1194 Parameters 

1195 ---------- 

1196 entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass` 

1197 Entities to test against this configuration. Can be differing 

1198 types. 

1199 logFailures : `bool`, optional 

1200 If `True`, output a log message for every validation error 

1201 detected. 

1202 

1203 Raises 

1204 ------ 

1205 DatastoreValidationError 

1206 Raised if there is a validation problem with a configuration. 

1207 

1208 Notes 

1209 ----- 

1210 Which parts of the configuration are validated is at the discretion 

1211 of each Datastore implementation. 

1212 """ 

1213 raise NotImplementedError("Must be implemented by subclass") 

1214 

1215 @abstractmethod 

1216 def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None: 

1217 """Validate a specific look up key with supplied entity. 

1218 

1219 Parameters 

1220 ---------- 

1221 lookupKey : `LookupKey` 

1222 Key to use to retrieve information from the datastore 

1223 configuration. 

1224 entity : `DatasetRef`, `DatasetType`, or `StorageClass` 

1225 Entity to compare with configuration retrieved using the 

1226 specified lookup key. 

1227 

1228 Raises 

1229 ------ 

1230 DatastoreValidationError 

1231 Raised if there is a problem with the combination of entity 

1232 and lookup key. 

1233 

1234 Notes 

1235 ----- 

1236 Bypasses the normal selection priorities by allowing a key that 

1237 would normally not be selected to be validated. 

1238 """ 

1239 raise NotImplementedError("Must be implemented by subclass") 

1240 

1241 @abstractmethod 

1242 def getLookupKeys(self) -> set[LookupKey]: 

1243 """Return all the lookup keys relevant to this datastore. 

1244 

1245 Returns 

1246 ------- 

1247 keys : `set` of `LookupKey` 

1248 The keys stored internally for looking up information based 

1249 on `DatasetType` name or `StorageClass`. 

1250 """ 

1251 raise NotImplementedError("Must be implemented by subclass") 

1252 

1253 def needs_expanded_data_ids( 

1254 self, 

1255 transfer: str | None, 

1256 entity: DatasetRef | DatasetType | StorageClass | None = None, 

1257 ) -> bool: 

1258 """Test whether this datastore needs expanded data IDs to ingest. 

1259 

1260 Parameters 

1261 ---------- 

1262 transfer : `str` or `None` 

1263 Transfer mode for ingest. 

1264 entity : `DatasetRef` or `DatasetType` or `StorageClass` or `None`, \ 

1265 optional 

1266 Object representing what will be ingested. If not provided (or not 

1267 specific enough), `True` may be returned even if expanded data 

1268 IDs aren't necessary. 

1269 

1270 Returns 

1271 ------- 

1272 needed : `bool` 

1273 If `True`, expanded data IDs may be needed. `False` only if 

1274 expansion definitely isn't necessary. 

1275 """ 

1276 return True 

1277 

1278 @abstractmethod 

1279 def import_records( 

1280 self, 

1281 data: Mapping[str, DatastoreRecordData], 

1282 ) -> None: 

1283 """Import datastore location and record data from an in-memory data 

1284 structure. 

1285 

1286 Parameters 

1287 ---------- 

1288 data : `~collections.abc.Mapping` [ `str`, `DatastoreRecordData` ] 

1289 Datastore records indexed by datastore name. May contain data for 

1290 other `Datastore` instances (generally because they are chained to 

1291 this one), which should be ignored. 

1292 

1293 Notes 

1294 ----- 

1295 Implementations should generally not check that any external resources 

1296 (e.g. files) referred to by these records actually exist, for 

1297 performance reasons; we expect higher-level code to guarantee that they 

1298 do. 

1299 

1300 Implementations are responsible for calling 

1301 `DatastoreRegistryBridge.insert` on all datasets in ``data.locations`` 

1302 where the key is in `names`, as well as loading any opaque table data. 

1303 

1304 Implementations may assume that datasets are either fully present or 

1305 not at all (single-component exports are not permitted). 

1306 """ 

1307 raise NotImplementedError() 

1308 

1309 @abstractmethod 

1310 def export_records( 

1311 self, 

1312 refs: Iterable[DatasetIdRef], 

1313 ) -> Mapping[str, DatastoreRecordData]: 

1314 """Export datastore records and locations to an in-memory data 

1315 structure. 

1316 

1317 Parameters 

1318 ---------- 

1319 refs : `~collections.abc.Iterable` [ `DatasetIdRef` ] 

1320 Datasets to save. This may include datasets not known to this 

1321 datastore, which should be ignored. May not include component 

1322 datasets. 

1323 

1324 Returns 

1325 ------- 

1326 data : `~collections.abc.Mapping` [ `str`, `DatastoreRecordData` ] 

1327 Exported datastore records indexed by datastore name. 

1328 """ 

1329 raise NotImplementedError() 

1330 

1331 def set_retrieve_dataset_type_method(self, method: Callable[[str], DatasetType | None] | None) -> None: 

1332 """Specify a method that can be used by datastore to retrieve 

1333 registry-defined dataset type. 

1334 

1335 Parameters 

1336 ---------- 

1337 method : `~collections.abc.Callable` | `None` 

1338 Method that takes a name of the dataset type and returns a 

1339 corresponding `DatasetType` instance as defined in the Registry. If

1340 the dataset type name is not known to the registry, `None` is returned.

1341 

1342 Notes 

1343 ----- 

1344 This method is only needed for a Datastore supporting a "trusted" mode 

1345 when it does not have access to datastore records and needs to

1346 guess dataset location based on its stored dataset type. 

1347 """ 

1348 pass 

1349 

1350 @abstractmethod 

1351 def get_opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]: 

1352 """Make definitions of the opaque tables used by this Datastore. 

1353 

1354 Returns 

1355 ------- 

1356 tables : `~collections.abc.Mapping` [ `str`, `DatastoreOpaqueTable` ]

1357 Mapping of opaque table names to their definitions. This can be an 

1358 empty mapping if Datastore does not use opaque tables to keep 

1359 datastore records. 

1360 """ 

1361 raise NotImplementedError() 

1362 

1363 

1364class NullDatastore(Datastore): 

1365 """A datastore that implements the `Datastore` API but always fails when 

1366 it accepts any request. 

1367 

1368 Parameters 

1369 ---------- 

1370 config : `Config` or `~lsst.resources.ResourcePathExpression` or `None` 

1371 Ignored. 

1372 bridgeManager : `DatastoreRegistryBridgeManager` or `None` 

1373 Ignored. 

1374 butlerRoot : `~lsst.resources.ResourcePathExpression` or `None` 

1375 Ignored. 

1376 """ 

1377 

1378 @classmethod 

1379 def _create_from_config( 

1380 cls, 

1381 config: Config, 

1382 bridgeManager: DatastoreRegistryBridgeManager, 

1383 butlerRoot: ResourcePathExpression | None = None, 

1384 ) -> NullDatastore: 

1385 return NullDatastore(config, bridgeManager, butlerRoot) 

1386 

1387 def clone(self, bridgeManager: DatastoreRegistryBridgeManager) -> Datastore: 

1388 return self 

1389 

1390 @classmethod 

1391 def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None: 

1392 # Nothing to do. This is not a real Datastore. 

1393 pass 

1394 

1395 def __init__( 

1396 self, 

1397 config: Config | ResourcePathExpression | None, 

1398 bridgeManager: DatastoreRegistryBridgeManager | None, 

1399 butlerRoot: ResourcePathExpression | None = None, 

1400 ): 

1401 # Name ourselves with the timestamp at which the datastore

1402 # was created.

1403 self.name = f"{type(self).__name__}@{time.time()}" 

1404 _LOG.debug("Creating datastore %s", self.name) 

1405 

1406 return 

1407 

1408 def knows(self, ref: DatasetRef) -> bool: 

1409 return False 

1410 

1411 def exists(self, datasetRef: DatasetRef) -> bool: 

1412 return False 

1413 

1414 def get( 

1415 self, 

1416 datasetRef: DatasetRef, 

1417 parameters: Mapping[str, Any] | None = None, 

1418 storageClass: StorageClass | str | None = None, 

1419 ) -> Any: 

1420 raise FileNotFoundError("This is a no-op datastore that can not access a real datastore") 

1421 

1422 def put(self, inMemoryDataset: Any, datasetRef: DatasetRef) -> None: 

1423 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1424 

1425 def put_new(self, in_memory_dataset: Any, ref: DatasetRef) -> Mapping[str, DatasetRef]: 

1426 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1427 

1428 def ingest( 

1429 self, *datasets: FileDataset, transfer: str | None = None, record_validation_info: bool = True 

1430 ) -> None: 

1431 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1432 

1433 def transfer_from( 

1434 self, 

1435 source_datastore: Datastore, 

1436 refs: Iterable[DatasetRef], 

1437 transfer: str = "auto", 

1438 artifact_existence: dict[ResourcePath, bool] | None = None, 

1439 dry_run: bool = False, 

1440 ) -> tuple[set[DatasetRef], set[DatasetRef]]: 

1441 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1442 

1443 def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs: 

1444 raise FileNotFoundError("This is a no-op datastore that can not access a real datastore") 

1445 

1446 def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath: 

1447 raise FileNotFoundError("This is a no-op datastore that can not access a real datastore") 

1448 

1449 def retrieveArtifacts( 

1450 self, 

1451 refs: Iterable[DatasetRef], 

1452 destination: ResourcePath, 

1453 transfer: str = "auto", 

1454 preserve_path: bool = True, 

1455 overwrite: bool = False, 

1456 ) -> list[ResourcePath]: 

1457 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1458 

1459 def remove(self, datasetRef: DatasetRef) -> None: 

1460 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1461 

1462 def forget(self, refs: Iterable[DatasetRef]) -> None: 

1463 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1464 

1465 def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None: 

1466 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1467 

1468 def emptyTrash(self, ignore_errors: bool = True) -> None: 

1469 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1470 

1471 def transfer(self, inputDatastore: Datastore, datasetRef: DatasetRef) -> None: 

1472 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1473 

1474 def export( 

1475 self, 

1476 refs: Iterable[DatasetRef], 

1477 *, 

1478 directory: ResourcePathExpression | None = None, 

1479 transfer: str | None = "auto", 

1480 ) -> Iterable[FileDataset]: 

1481 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1482 

1483 def validateConfiguration( 

1484 self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False 

1485 ) -> None: 

1486 # No configuration so always validates. 

1487 pass 

1488 

1489 def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None: 

1490 pass 

1491 

1492 def getLookupKeys(self) -> set[LookupKey]: 

1493 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1494 

1495 def import_records( 

1496 self, 

1497 data: Mapping[str, DatastoreRecordData], 

1498 ) -> None: 

1499 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1500 

1501 def export_records( 

1502 self, 

1503 refs: Iterable[DatasetIdRef], 

1504 ) -> Mapping[str, DatastoreRecordData]: 

1505 raise NotImplementedError("This is a no-op datastore that can not access a real datastore") 

1506 

1507 def get_opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]: 

1508 return {}
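

# Illustrative sketch (editor addition): NullDatastore needs no real
# configuration and refuses all data access, which makes it useful as a
# placeholder when a butler is constructed without a working datastore.
def _demo_null_datastore() -> None:
    null = NullDatastore(config=None, bridgeManager=None)
    assert null.knows_these([]) == {}          # knows no datasets
    assert null.roots == {null.name: None}     # no root URI concept
    assert null.get_opaque_table_definitions() == {}
    try:
        null.getURI(None)  # type: ignore[arg-type]  # hypothetical call
    except FileNotFoundError:
        pass  # any real access fails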