Coverage for python/lsst/daf/butler/datastore/_datastore.py: 64%
275 statements

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
28"""Support for generic data stores."""
30from __future__ import annotations
32__all__ = (
33 "DatasetRefURIs",
34 "Datastore",
35 "DatastoreConfig",
36 "DatastoreOpaqueTable",
37 "DatastoreValidationError",
38 "NullDatastore",
39 "DatastoreTransaction",
40)

import contextlib
import dataclasses
import logging
import time
from abc import ABCMeta, abstractmethod
from collections import abc, defaultdict
from collections.abc import Callable, Collection, Iterable, Iterator, Mapping
from typing import TYPE_CHECKING, Any, ClassVar

from lsst.utils import doImportType

from .._config import Config, ConfigSubset
from .._exceptions import DatasetTypeNotSupportedError, ValidationError
from .._file_dataset import FileDataset
from .._storage_class import StorageClassFactory
from .constraints import Constraints

if TYPE_CHECKING:
    from lsst.resources import ResourcePath, ResourcePathExpression

    from .. import ddl
    from .._config_support import LookupKey
    from .._dataset_ref import DatasetRef
    from .._dataset_type import DatasetType
    from .._storage_class import StorageClass
    from ..registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager
    from .record_data import DatastoreRecordData
    from .stored_file_info import StoredDatastoreItemInfo

_LOG = logging.getLogger(__name__)


class DatastoreConfig(ConfigSubset):
    """Configuration for Datastores."""

    component = "datastore"
    requiredKeys = ("cls",)
    defaultConfigFile = "datastore.yaml"


class DatastoreValidationError(ValidationError):
    """There is a problem with the Datastore configuration."""

    pass


@dataclasses.dataclass(frozen=True)
class Event:
    """Representation of an event that can be rolled back."""

    __slots__ = {"name", "undoFunc", "args", "kwargs"}
    name: str
    undoFunc: Callable
    args: tuple
    kwargs: dict


@dataclasses.dataclass(frozen=True)
class DatastoreOpaqueTable:
    """Definition of the opaque table which stores datastore records.

    The table definition contains the `.ddl.TableSpec` for the table and the
    record class, which must be a subclass of `StoredDatastoreItemInfo`.
    """

    __slots__ = {"table_spec", "record_class"}
    table_spec: ddl.TableSpec
    record_class: type[StoredDatastoreItemInfo]


class IngestPrepData:
    """A helper base class for `Datastore` ingest implementations.

    Datastore implementations will generally need a custom implementation of
    this class.

    Should be accessed as ``Datastore.IngestPrepData`` instead of via direct
    import.

    Parameters
    ----------
    refs : iterable of `DatasetRef`
        References for the datasets that can be ingested by this datastore.
    """

    def __init__(self, refs: Iterable[DatasetRef]):
        self.refs = {ref.id: ref for ref in refs}


class DatastoreTransaction:
    """Keeps a log of `Datastore` activity and allows rollback.

    Parameters
    ----------
    parent : `DatastoreTransaction`, optional
        The parent transaction (if any).
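
    Examples
    --------
    A minimal sketch of registering an undo action and rolling it back;
    the ``print`` undo function is purely illustrative:

    >>> txn = DatastoreTransaction()
    >>> txn.registerUndo("example", print, "undoing example")
    >>> txn.rollback()
    undoing example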
138 """
140 Event: ClassVar[type] = Event
142 parent: DatastoreTransaction | None
143 """The parent transaction. (`DatastoreTransaction`, optional)"""
145 def __init__(self, parent: DatastoreTransaction | None = None):
146 self.parent = parent
147 self._log: list[Event] = []
149 def registerUndo(self, name: str, undoFunc: Callable, *args: Any, **kwargs: Any) -> None:
150 """Register event with undo function.
152 Parameters
153 ----------
154 name : `str`
155 Name of the event.
156 undoFunc : `~collections.abc.Callable`
157 Function to undo this event.
158 *args : `tuple`
159 Positional arguments to ``undoFunc``.
160 **kwargs
161 Keyword arguments to ``undoFunc``.
162 """
163 self._log.append(self.Event(name, undoFunc, args, kwargs))
165 @contextlib.contextmanager
166 def undoWith(self, name: str, undoFunc: Callable, *args: Any, **kwargs: Any) -> Iterator[None]:
167 """Register undo function if nested operation succeeds.
169 Calls `registerUndo`.
171 This can be used to wrap individual undo-able statements within a
172 DatastoreTransaction block. Multiple statements that can fail
173 separately should not be part of the same `undoWith` block.
175 All arguments are forwarded directly to `registerUndo`.
177 Parameters
178 ----------
179 name : `str`
180 The name to associate with this event.
181 undoFunc : `~collections.abc.Callable`
182 Function to undo this event.
183 *args : `tuple`
184 Positional arguments for ``undoFunc``.
185 **kwargs : `typing.Any`
186 Keyword arguments for ``undoFunc``.
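
        Examples
        --------
        A minimal sketch: the undo action is only registered because the
        wrapped block completes without raising (the ``print`` undo
        function is purely illustrative):

        >>> txn = DatastoreTransaction()
        >>> with txn.undoWith("example", print, "undone"):
        ...     pass
        >>> txn.rollback()
        undone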
187 """
188 try:
189 yield None
190 except BaseException:
191 raise
192 else:
193 self.registerUndo(name, undoFunc, *args, **kwargs)
195 def rollback(self) -> None:
196 """Roll back all events in this transaction."""
197 log = logging.getLogger(__name__)
198 while self._log:
199 ev = self._log.pop()
200 try:
201 log.debug(
202 "Rolling back transaction: %s: %s(%s,%s)",
203 ev.name,
204 ev.undoFunc,
205 ",".join(str(a) for a in ev.args),
206 ",".join(f"{k}={v}" for k, v in ev.kwargs.items()),
207 )
208 except Exception:
209 # In case we had a problem in stringification of arguments
210 log.warning("Rolling back transaction: %s", ev.name)
211 try:
212 ev.undoFunc(*ev.args, **ev.kwargs)
213 except BaseException as e:
214 # Deliberately swallow error that may occur in unrolling
215 log.warning("Exception: %s caught while unrolling: %s", e, ev.name)
216 pass
218 def commit(self) -> None:
219 """Commit this transaction."""
220 if self.parent is None:
221 # Just forget about the events, they have already happened.
222 return
223 else:
224 # We may still want to events from this transaction as part of
225 # the parent.
226 self.parent._log.extend(self._log)


@dataclasses.dataclass
class DatasetRefURIs(abc.Sequence):
    """Represents the primary and component ResourcePath(s) associated with a
    DatasetRef.

    This is used in places where its members used to be represented as a
    tuple ``(primaryURI, componentURIs)``. To maintain backward compatibility
    this inherits from `~collections.abc.Sequence`, so instances can be
    treated as a two-item tuple.

    Parameters
    ----------
    primaryURI : `lsst.resources.ResourcePath` or `None`, optional
        The URI to the primary artifact associated with this dataset. If the
        dataset was disassembled within the datastore this may be `None`.
    componentURIs : `dict` [`str`, `~lsst.resources.ResourcePath`] or `None`
        The URIs to any components associated with the dataset artifact
        indexed by component name. This can be empty if there are no
        components.
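
    Examples
    --------
    A small sketch of the tuple-like access (an empty instance is used here
    only because real instances are normally built inside a datastore):

    >>> uris = DatasetRefURIs()
    >>> primary, components = uris
    >>> (primary, components)
    (None, {})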
248 """
250 def __init__(
251 self,
252 primaryURI: ResourcePath | None = None,
253 componentURIs: dict[str, ResourcePath] | None = None,
254 ):
255 self.primaryURI = primaryURI
256 self.componentURIs = componentURIs or {}
258 def __getitem__(self, index: Any) -> Any:
259 """Get primaryURI and componentURIs by index.
261 Provides support for tuple-like access.
262 """
263 if index == 0:
264 return self.primaryURI
265 elif index == 1:
266 return self.componentURIs
267 raise IndexError("list index out of range")
269 def __len__(self) -> int:
270 """Get the number of data members.
272 Provides support for tuple-like access.
273 """
274 return 2
276 def __repr__(self) -> str:
277 return f"DatasetRefURIs({repr(self.primaryURI)}, {repr(self.componentURIs)})"


class Datastore(metaclass=ABCMeta):
    """Datastore interface.

    Parameters
    ----------
    config : `DatastoreConfig` or `str`
        Load configuration either from an existing config instance or by
        referring to a configuration file.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Object that manages the interface between `Registry` and datastores.

    See Also
    --------
    lsst.daf.butler.Butler
    """

    defaultConfigFile: ClassVar[str | None] = None
    """Path to configuration defaults. Accessed within the ``config`` resource
    or relative to a search path. Can be `None` if no defaults are specified.
    """

    containerKey: ClassVar[str | None] = None
    """Name of the key containing a list of subconfigurations that also
    need to be merged with defaults and will likely use different Python
    datastore classes (but all using `DatastoreConfig`). Assumed to be a
    list of configurations that can be represented in a `DatastoreConfig`
    and containing a "cls" definition. `None` indicates that no containers
    are expected in this Datastore."""

    isEphemeral: bool = False
    """Indicate whether this Datastore is ephemeral or not. An ephemeral
    datastore is one where the contents of the datastore will not exist
    across process restarts. This value can change per-instance."""

    config: DatastoreConfig
    """Configuration used to create Datastore."""

    name: str
    """Label associated with this Datastore."""

    storageClassFactory: StorageClassFactory
    """Factory for creating storage class instances from name."""

    constraints: Constraints
    """Constraints to apply when putting datasets into the datastore."""

    # MyPy does not like for this to be annotated as any kind of type,
    # because it can't do static checking on type variables that can change
    # at runtime.
    IngestPrepData: ClassVar[Any] = IngestPrepData
    """Helper base class for ingest implementations.
    """

    @classmethod
    @abstractmethod
    def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
        """Set filesystem-dependent config options for this datastore.

        The options will be appropriate for a new empty repository with the
        given root.

        Parameters
        ----------
        root : `str`
            Filesystem path to the root of the data repository.
        config : `Config`
            A `Config` to update. Only the subset understood by
            this component will be updated. Will not expand
            defaults.
        full : `Config`
            A complete config with all defaults expanded that can be
            converted to a `DatastoreConfig`. Read-only and will not be
            modified by this method.
            Repository-specific options that should not be obtained
            from defaults when Butler instances are constructed
            should be copied from ``full`` to ``config``.
        overwrite : `bool`, optional
            If `False`, do not modify a value in ``config`` if the value
            already exists. Default is always to overwrite with the provided
            ``root``.

        Notes
        -----
        If a keyword is explicitly defined in the supplied ``config`` it
        will not be overridden by this method if ``overwrite`` is `False`.
        This allows explicit values set in external configs to be retained.
        """
        raise NotImplementedError()

    @staticmethod
    def fromConfig(
        config: Config,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: ResourcePathExpression | None = None,
    ) -> Datastore:
        """Create datastore from type specified in config file.

        Parameters
        ----------
        config : `Config` or `~lsst.resources.ResourcePathExpression`
            Configuration instance.
        bridgeManager : `DatastoreRegistryBridgeManager`
            Object that manages the interface between `Registry` and
            datastores.
        butlerRoot : `~lsst.resources.ResourcePathExpression`, optional
            Butler root directory.

        Returns
        -------
        datastore : `Datastore`
            Instance of the datastore class named by the ``cls`` key of the
            configuration.
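
        Examples
        --------
        A hypothetical configuration that selects a datastore class by its
        fully qualified name; the class path is illustrative and the bridge
        manager is assumed to exist already:

        >>> config = DatastoreConfig(
        ...     {"cls": "lsst.daf.butler.datastores.inMemoryDatastore.InMemoryDatastore"}
        ... )  # doctest: +SKIP
        >>> datastore = Datastore.fromConfig(config, bridgeManager)  # doctest: +SKIP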
385 """
386 config = DatastoreConfig(config)
387 cls = doImportType(config["cls"])
388 if not issubclass(cls, Datastore):
389 raise TypeError(f"Imported child class {config['cls']} is not a Datastore")
390 return cls._create_from_config(config=config, bridgeManager=bridgeManager, butlerRoot=butlerRoot)

    def __init__(
        self,
        config: DatastoreConfig,
        bridgeManager: DatastoreRegistryBridgeManager,
    ):
        self.config = config
        self.name = "ABCDataStore"
        self._transaction: DatastoreTransaction | None = None

        # All Datastores need storage classes and constraints.
        self.storageClassFactory = StorageClassFactory()

        # And read the constraints list.
        constraintsConfig = self.config.get("constraints")
        self.constraints = Constraints(constraintsConfig, universe=bridgeManager.universe)

    @classmethod
    @abstractmethod
    def _create_from_config(
        cls,
        config: DatastoreConfig,
        bridgeManager: DatastoreRegistryBridgeManager,
        butlerRoot: ResourcePathExpression | None,
    ) -> Datastore:
        """`Datastore`.``fromConfig`` calls this to instantiate Datastore
        subclasses. This is the primary constructor for the individual
        Datastore subclasses.
        """
        raise NotImplementedError()

    @abstractmethod
    def clone(self, bridgeManager: DatastoreRegistryBridgeManager) -> Datastore:
        """Make an independent copy of this Datastore with a different
        `DatastoreRegistryBridgeManager` instance.

        Parameters
        ----------
        bridgeManager : `DatastoreRegistryBridgeManager`
            New `DatastoreRegistryBridgeManager` object to use when
            instantiating managers.

        Returns
        -------
        datastore : `Datastore`
            New `Datastore` instance with the same configuration as the
            existing instance.
        """
        raise NotImplementedError()

    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return self.name

    @property
    def names(self) -> tuple[str, ...]:
        """Names associated with this datastore returned as a tuple.

        Can be different from ``name`` for a chaining datastore.
        """
        # Default implementation returns solely the name itself.
        return (self.name,)

    @property
    def roots(self) -> dict[str, ResourcePath | None]:
        """Return the root URIs for each named datastore.

        Mapping from datastore name to root URI. The URI can be `None`
        if a datastore has no concept of a root URI.
        (`dict` [`str`, `ResourcePath` | `None`])
        """
        return {self.name: None}

    @contextlib.contextmanager
    def transaction(self) -> Iterator[DatastoreTransaction]:
        """Context manager supporting `Datastore` transactions.

        Transactions can be nested, and are to be used in combination with
        `Registry.transaction`.
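
        Examples
        --------
        A sketch of the intended usage, assuming a concrete ``datastore``
        instance; any exception raised inside the block triggers a rollback
        of the registered undo actions:

        >>> with datastore.transaction():  # doctest: +SKIP
        ...     datastore.put(inMemoryDataset, ref)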
472 """
473 self._transaction = DatastoreTransaction(self._transaction)
474 try:
475 yield self._transaction
476 except BaseException:
477 self._transaction.rollback()
478 raise
479 else:
480 self._transaction.commit()
481 self._transaction = self._transaction.parent

    def _set_trust_mode(self, mode: bool) -> None:
        """Set the trust mode for this datastore.

        Parameters
        ----------
        mode : `bool`
            If `True`, get requests will be attempted even if the datastore
            does not know about the dataset.

        Notes
        -----
        This is a private method to indicate that trust mode might be a
        transitory property that we do not want to make fully public. For now
        only a `~lsst.daf.butler.datastores.FileDatastore` understands this
        concept. By default this method does nothing.
        """
        return

    @abstractmethod
    def knows(self, ref: DatasetRef) -> bool:
        """Check if the dataset is known to the datastore.

        Does not check for existence of any artifact.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required dataset.

        Returns
        -------
        exists : `bool`
            `True` if the dataset is known to the datastore.
        """
        raise NotImplementedError()

    def knows_these(self, refs: Iterable[DatasetRef]) -> dict[DatasetRef, bool]:
        """Check which of the given datasets are known to this datastore.

        This is like `mexists()` but does not check that the file exists.

        Parameters
        ----------
        refs : iterable of `DatasetRef`
            The datasets to check.

        Returns
        -------
        exists : `dict` [`DatasetRef`, `bool`]
            Mapping of dataset to boolean indicating whether the dataset
            is known to the datastore.
        """
        # Non-optimized default calls knows() repeatedly.
        return {ref: self.knows(ref) for ref in refs}

    def mexists(
        self, refs: Iterable[DatasetRef], artifact_existence: dict[ResourcePath, bool] | None = None
    ) -> dict[DatasetRef, bool]:
        """Check the existence of multiple datasets at once.

        Parameters
        ----------
        refs : iterable of `DatasetRef`
            The datasets to be checked.
        artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`]
            Optional mapping of datastore artifact to existence. Updated by
            this method with details of all artifacts tested. Can be `None`
            if the caller is not interested.

        Returns
        -------
        existence : `dict` [`DatasetRef`, `bool`]
            Mapping from dataset to boolean indicating existence.
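
        Examples
        --------
        A sketch assuming a concrete ``datastore`` and a list of resolved
        ``refs``:

        >>> existence = datastore.mexists(refs)  # doctest: +SKIP
        >>> missing = [ref for ref, ok in existence.items() if not ok]  # doctest: +SKIP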
556 """
557 existence: dict[DatasetRef, bool] = {}
558 # Non-optimized default.
559 for ref in refs:
560 existence[ref] = self.exists(ref)
561 return existence

    @abstractmethod
    def exists(self, datasetRef: DatasetRef) -> bool:
        """Check if the dataset exists in the datastore.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required dataset.

        Returns
        -------
        exists : `bool`
            `True` if the entity exists in the `Datastore`.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def get(
        self,
        datasetRef: DatasetRef,
        parameters: Mapping[str, Any] | None = None,
        storageClass: StorageClass | str | None = None,
    ) -> Any:
        """Load an `InMemoryDataset` from the store.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required Dataset.
        parameters : `dict`
            `StorageClass`-specific parameters that specify a slice of the
            Dataset to be loaded.
        storageClass : `StorageClass` or `str`, optional
            The storage class to be used to override the Python type
            returned by this method. By default the returned type matches
            the dataset type definition for this dataset. Specifying a
            read `StorageClass` can force a different type to be returned.
            This type must be compatible with the original type.

        Returns
        -------
        inMemoryDataset : `object`
            Requested Dataset or slice thereof as an InMemoryDataset.
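
        Examples
        --------
        A sketch of a parameter-based slice and a read storage class
        override; the ``datastore``, ``ref``, ``bbox``, and storage class
        name are all illustrative assumptions:

        >>> cutout = datastore.get(ref, parameters={"bbox": bbox})  # doctest: +SKIP
        >>> table = datastore.get(ref, storageClass="AstropyTable")  # doctest: +SKIP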
606 """
607 raise NotImplementedError("Must be implemented by subclass")

    def prepare_get_for_external_client(self, ref: DatasetRef) -> object | None:
        """Retrieve serializable data that can be used to execute a ``get()``.

        Parameters
        ----------
        ref : `DatasetRef`
            Reference to the required dataset.

        Returns
        -------
        payload : `object` | `None`
            Serializable payload containing the information needed to perform
            a get() operation. This payload may be sent over the wire to
            another system to perform the get(). Returns `None` if the
            dataset is not known to this datastore.
        """
        raise NotImplementedError()

    @abstractmethod
    def put(self, inMemoryDataset: Any, datasetRef: DatasetRef) -> None:
        """Write an `InMemoryDataset` with a given `DatasetRef` to the store.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        datasetRef : `DatasetRef`
            Reference to the associated Dataset.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def put_new(self, in_memory_dataset: Any, ref: DatasetRef) -> Mapping[str, DatasetRef]:
        """Write an `InMemoryDataset` with a given `DatasetRef` to the store.

        Parameters
        ----------
        in_memory_dataset : `object`
            The Dataset to store.
        ref : `DatasetRef`
            Reference to the associated Dataset.

        Returns
        -------
        datastore_refs : `~collections.abc.Mapping` [`str`, `DatasetRef`]
            Mapping from datastore name to the dataset reference stored in
            that datastore; the reference will include datastore records.
            Only non-ephemeral datastores will appear in this mapping.
        """
        raise NotImplementedError("Must be implemented by subclass")

    def _overrideTransferMode(self, *datasets: FileDataset, transfer: str | None = None) -> str | None:
        """Allow ingest transfer mode to be defaulted based on datasets.

        Parameters
        ----------
        *datasets : `FileDataset`
            Each positional argument is a struct containing information about
            a file to be ingested, including its path (either absolute or
            relative to the datastore root, if applicable), a complete
            `DatasetRef` (with ``dataset_id not None``), and optionally a
            formatter class or its fully-qualified string name. If a
            formatter is not provided, this method should populate that
            attribute with the formatter the datastore would use for `put`.
            Subclasses are also permitted to modify the path attribute
            (typically to put it in what the datastore considers its
            standard form).
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            See `ingest` for details of transfer modes.

        Returns
        -------
        newTransfer : `str`
            Transfer mode to use. Will be identical to the supplied transfer
            mode unless "auto" is used.
        """
        if transfer != "auto":
            return transfer
        raise RuntimeError(f"{transfer} is not allowed without specialization.")

    def _prepIngest(self, *datasets: FileDataset, transfer: str | None = None) -> IngestPrepData:
        """Process datasets to identify which ones can be ingested.

        Parameters
        ----------
        *datasets : `FileDataset`
            Each positional argument is a struct containing information about
            a file to be ingested, including its path (either absolute or
            relative to the datastore root, if applicable), a complete
            `DatasetRef` (with ``dataset_id not None``), and optionally a
            formatter class or its fully-qualified string name. If a
            formatter is not provided, this method should populate that
            attribute with the formatter the datastore would use for `put`.
            Subclasses are also permitted to modify the path attribute
            (typically to put it in what the datastore considers its
            standard form).
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            See `ingest` for details of transfer modes.

        Returns
        -------
        data : `IngestPrepData`
            An instance of a subclass of `IngestPrepData`, used to pass
            arbitrary data from `_prepIngest` to `_finishIngest`. This should
            include only the datasets this datastore can actually ingest;
            others should be silently ignored (`Datastore.ingest` will
            inspect `IngestPrepData.refs` and raise
            `DatasetTypeNotSupportedError` if necessary).

        Raises
        ------
        NotImplementedError
            Raised if the datastore does not support the given transfer mode
            (including the case where ingest is not supported at all).
        FileNotFoundError
            Raised if one of the given files does not exist.
        FileExistsError
            Raised if transfer is not `None` but the (internal) location the
            file would be moved to is already occupied.

        Notes
        -----
        This method (along with `_finishIngest`) should be implemented by
        subclasses to provide ingest support instead of implementing `ingest`
        directly.

        `_prepIngest` should not modify the data repository or given files in
        any way; all changes should be deferred to `_finishIngest`.

        When possible, exceptions should be raised in `_prepIngest` instead
        of `_finishIngest`. `NotImplementedError` exceptions that indicate
        that the transfer mode is not supported must be raised by
        `_prepIngest` instead of `_finishIngest`.
        """
        raise NotImplementedError(f"Datastore {self} does not support direct file-based ingest.")

    def _finishIngest(
        self, prepData: IngestPrepData, *, transfer: str | None = None, record_validation_info: bool = True
    ) -> None:
        """Complete an ingest operation.

        Parameters
        ----------
        prepData : `IngestPrepData`
            An instance of a subclass of `IngestPrepData`. Guaranteed to be
            the direct result of a call to `_prepIngest` on this datastore.
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            See `ingest` for details of transfer modes.
        record_validation_info : `bool`, optional
            If `True`, the default, the datastore can record validation
            information associated with the file. If `False` the datastore
            will not attempt to track any information such as checksums
            or file sizes. This can be useful if such information is tracked
            in an external system or if the file is to be compressed in
            place. It is up to the datastore whether this parameter is
            relevant.

        Raises
        ------
        FileNotFoundError
            Raised if one of the given files does not exist.
        FileExistsError
            Raised if transfer is not `None` but the (internal) location the
            file would be moved to is already occupied.

        Notes
        -----
        This method (along with `_prepIngest`) should be implemented by
        subclasses to provide ingest support instead of implementing `ingest`
        directly.
        """
        raise NotImplementedError(f"Datastore {self} does not support direct file-based ingest.")

    def ingest(
        self, *datasets: FileDataset, transfer: str | None = None, record_validation_info: bool = True
    ) -> None:
        """Ingest one or more files into the datastore.

        Parameters
        ----------
        *datasets : `FileDataset`
            Each positional argument is a struct containing information about
            a file to be ingested, including its path (either absolute or
            relative to the datastore root, if applicable), a complete
            `DatasetRef` (with ``dataset_id not None``), and optionally a
            formatter class or its fully-qualified string name. If a
            formatter is not provided, the one the datastore would use for
            ``put`` on that dataset is assumed.
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            If `None` (default), the file must already be in a location
            appropriate for the datastore (e.g. within its root directory),
            and will not be modified. Other choices include "move", "copy",
            "link", "symlink", "relsymlink", and "hardlink". "link" is a
            special transfer mode that will first try to make a hardlink and
            if that fails a symlink will be used instead. "relsymlink"
            creates a relative symlink rather than use an absolute path.
            Most datastores do not support all transfer modes.
            "auto" is a special option that will let the
            data store choose the most natural option for itself.
        record_validation_info : `bool`, optional
            If `True`, the default, the datastore can record validation
            information associated with the file. If `False` the datastore
            will not attempt to track any information such as checksums
            or file sizes. This can be useful if such information is tracked
            in an external system or if the file is to be compressed in
            place. It is up to the datastore whether this parameter is
            relevant.

        Raises
        ------
        NotImplementedError
            Raised if the datastore does not support the given transfer mode
            (including the case where ingest is not supported at all).
        DatasetTypeNotSupportedError
            Raised if one or more files to be ingested have a dataset type
            that is not supported by the datastore.
        FileNotFoundError
            Raised if one of the given files does not exist.
        FileExistsError
            Raised if transfer is not `None` but the (internal) location the
            file would be moved to is already occupied.

        Notes
        -----
        Subclasses should implement `_prepIngest` and `_finishIngest` instead
        of implementing `ingest` directly. Datastores that hold and
        delegate to child datastores may want to call those methods as well.

        Subclasses are encouraged to document their supported transfer modes
        in their class documentation.
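
        Examples
        --------
        A sketch of copying one existing file into the datastore; the path,
        the ``ref``, and a datastore supporting the "copy" mode are all
        illustrative assumptions:

        >>> from lsst.daf.butler import FileDataset
        >>> dataset = FileDataset(path="data/image.fits", refs=ref)  # doctest: +SKIP
        >>> datastore.ingest(dataset, transfer="copy")  # doctest: +SKIP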
839 """
840 # Allow a datastore to select a default transfer mode
841 transfer = self._overrideTransferMode(*datasets, transfer=transfer)
842 prepData = self._prepIngest(*datasets, transfer=transfer)
843 refs = {ref.id: ref for dataset in datasets for ref in dataset.refs}
844 if refs.keys() != prepData.refs.keys():
845 unsupported = refs.keys() - prepData.refs.keys()
846 # Group unsupported refs by DatasetType for an informative
847 # but still concise error message.
848 byDatasetType = defaultdict(list)
849 for datasetId in unsupported:
850 ref = refs[datasetId]
851 byDatasetType[ref.datasetType].append(ref)
852 raise DatasetTypeNotSupportedError(
853 "DatasetType(s) not supported in ingest: "
854 + ", ".join(f"{k.name} ({len(v)} dataset(s))" for k, v in byDatasetType.items())
855 )
856 self._finishIngest(prepData, transfer=transfer, record_validation_info=record_validation_info)

    def transfer_from(
        self,
        source_datastore: Datastore,
        refs: Collection[DatasetRef],
        transfer: str = "auto",
        artifact_existence: dict[ResourcePath, bool] | None = None,
        dry_run: bool = False,
    ) -> tuple[set[DatasetRef], set[DatasetRef]]:
        """Transfer dataset artifacts from another datastore to this one.

        Parameters
        ----------
        source_datastore : `Datastore`
            The datastore from which to transfer artifacts. That datastore
            must be compatible with this datastore receiving the artifacts.
        refs : `~collections.abc.Collection` of `DatasetRef`
            The datasets to transfer from the source datastore.
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            Choices include "move", "copy",
            "link", "symlink", "relsymlink", and "hardlink". "link" is a
            special transfer mode that will first try to make a hardlink and
            if that fails a symlink will be used instead. "relsymlink"
            creates a relative symlink rather than use an absolute path.
            Most datastores do not support all transfer modes.
            "auto" (the default) is a special option that will let the
            data store choose the most natural option for itself.
            If the source location and transfer location are identical the
            transfer mode will be ignored.
        artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`]
            Optional mapping of datastore artifact to existence. Updated by
            this method with details of all artifacts tested. Can be `None`
            if the caller is not interested.
        dry_run : `bool`, optional
            Process the supplied source refs without updating the target
            datastore.

        Returns
        -------
        accepted : `set` [`DatasetRef`]
            The datasets that were transferred.
        rejected : `set` [`DatasetRef`]
            The datasets that were rejected due to a constraints violation.

        Raises
        ------
        TypeError
            Raised if the two datastores are not compatible.
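
        Examples
        --------
        A sketch assuming two compatible, concrete datastores and a
        collection of resolved ``refs``:

        >>> accepted, rejected = target.transfer_from(source, refs)  # doctest: +SKIP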
906 """
907 if type(self) is not type(source_datastore):
908 raise TypeError(
909 f"Datastore mismatch between this datastore ({type(self)}) and the "
910 f"source datastore ({type(source_datastore)})."
911 )
913 raise NotImplementedError(f"Datastore {type(self)} must implement a transfer_from method.")

    def getManyURIs(
        self,
        refs: Iterable[DatasetRef],
        predict: bool = False,
        allow_missing: bool = False,
    ) -> dict[DatasetRef, DatasetRefURIs]:
        """Return URIs associated with many datasets.

        Parameters
        ----------
        refs : iterable of `DatasetRef`
            References to the required datasets.
        predict : `bool`, optional
            If `True`, allow URIs to be returned of datasets that have not
            been written.
        allow_missing : `bool`
            If `False`, and ``predict`` is `False`, will raise if a
            `DatasetRef` does not exist.

        Returns
        -------
        URIs : `dict` [`DatasetRef`, `DatasetRefURIs`]
            A dict of primary and component URIs, indexed by the passed-in
            refs.

        Raises
        ------
        FileNotFoundError
            A URI has been requested for a dataset that does not exist and
            guessing is not allowed.

        Notes
        -----
        In file-based datastores, ``getManyURIs`` does not check that the
        file is really there; it assumes that if the datastore is aware of
        the file then it actually exists.
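
        Examples
        --------
        A sketch assuming a concrete ``datastore`` and resolved ``refs``;
        each value unpacks like a two-item tuple:

        >>> uris = datastore.getManyURIs(refs, allow_missing=True)  # doctest: +SKIP
        >>> for ref, (primary, components) in uris.items():  # doctest: +SKIP
        ...     print(ref.id, primary)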
951 """
952 uris: dict[DatasetRef, DatasetRefURIs] = {}
953 missing_refs = []
954 for ref in refs:
955 try:
956 uris[ref] = self.getURIs(ref, predict=predict)
957 except FileNotFoundError:
958 missing_refs.append(ref)
959 if missing_refs and not allow_missing:
960 num_missing = len(missing_refs)
961 raise FileNotFoundError(
962 f"Missing {num_missing} refs from datastore out of "
963 f"{num_missing + len(uris)} and predict=False."
964 )
965 return uris

    @abstractmethod
    def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs:
        """Return URIs associated with dataset.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required dataset.
        predict : `bool`, optional
            If the datastore does not know about the dataset, controls
            whether it should return a predicted URI or not.

        Returns
        -------
        uris : `DatasetRefURIs`
            The URI to the primary artifact associated with this dataset (if
            the dataset was disassembled within the datastore this may be
            `None`), and the URIs to any components associated with the
            dataset artifact (can be empty if there are no components).
        """
        raise NotImplementedError()

    @abstractmethod
    def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath:
        """URI to the Dataset.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required Dataset.
        predict : `bool`
            If `True` attempt to predict the URI for a dataset if it does
            not exist in datastore.

        Returns
        -------
        uri : `lsst.resources.ResourcePath`
            URI pointing to the Dataset within the datastore. If the
            Dataset does not exist in the datastore, the URI may be a guess.
            If the datastore does not have entities that relate well
            to the concept of a URI the returned URI will be
            descriptive. The returned URI is not guaranteed to be obtainable.

        Raises
        ------
        FileNotFoundError
            A URI has been requested for a dataset that does not exist and
            guessing is not allowed.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def retrieveArtifacts(
        self,
        refs: Iterable[DatasetRef],
        destination: ResourcePath,
        transfer: str = "auto",
        preserve_path: bool = True,
        overwrite: bool = False,
    ) -> list[ResourcePath]:
        """Retrieve the artifacts associated with the supplied refs.

        Parameters
        ----------
        refs : iterable of `DatasetRef`
            The datasets for which artifacts are to be retrieved.
            A single ref can result in multiple artifacts. The refs must
            be resolved.
        destination : `lsst.resources.ResourcePath`
            Location to write the artifacts.
        transfer : `str`, optional
            Method to use to transfer the artifacts. Must be one of the
            options supported by
            `lsst.resources.ResourcePath.transfer_from()`. "move" is not
            allowed.
        preserve_path : `bool`, optional
            If `True` the full path of the artifact within the datastore
            is preserved. If `False` the final file component of the path
            is used.
        overwrite : `bool`, optional
            If `True` allow transfers to overwrite existing files at the
            destination.

        Returns
        -------
        targets : `list` of `lsst.resources.ResourcePath`
            URIs of file artifacts in destination location. Order is not
            preserved.

        Notes
        -----
        For non-file datastores the artifacts written to the destination
        may not match the representation inside the datastore. For example
        a hierarchical data structure in a NoSQL database may well be stored
        as a JSON file.
        """
        raise NotImplementedError()

    @abstractmethod
    def remove(self, datasetRef: DatasetRef) -> None:
        """Indicate to the Datastore that a Dataset can be removed.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required Dataset.

        Raises
        ------
        FileNotFoundError
            When Dataset does not exist.

        Notes
        -----
        Some Datastores may implement this method as a silent no-op to
        disable Dataset deletion through standard interfaces.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def forget(self, refs: Iterable[DatasetRef]) -> None:
        """Indicate to the Datastore that it should remove all records of the
        given datasets, without actually deleting them.

        Parameters
        ----------
        refs : `~collections.abc.Iterable` [ `DatasetRef` ]
            References to the datasets being forgotten.

        Notes
        -----
        Asking a datastore to forget a `DatasetRef` it does not hold should
        be a silent no-op, not an error.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None:
        """Indicate to the Datastore that a Dataset can be moved to the trash.

        Parameters
        ----------
        ref : `DatasetRef` or iterable thereof
            Reference(s) to the required Dataset.
        ignore_errors : `bool`, optional
            Determine whether errors should be ignored. When multiple
            refs are being trashed there will be no per-ref check.

        Raises
        ------
        FileNotFoundError
            When Dataset does not exist and errors are not ignored. Only
            checked if a single ref is supplied (and not in a list).

        Notes
        -----
        Some Datastores may implement this method as a silent no-op to
        disable Dataset deletion through standard interfaces.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def emptyTrash(self, ignore_errors: bool = True) -> None:
        """Remove all datasets from the trash.

        Parameters
        ----------
        ignore_errors : `bool`, optional
            Determine whether errors should be ignored.

        Notes
        -----
        Some Datastores may implement this method as a silent no-op to
        disable Dataset deletion through standard interfaces.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def transfer(self, inputDatastore: Datastore, datasetRef: DatasetRef) -> None:
        """Transfer a dataset from another datastore to this datastore.

        Parameters
        ----------
        inputDatastore : `Datastore`
            The external `Datastore` from which to retrieve the Dataset.
        datasetRef : `DatasetRef`
            Reference to the required Dataset.
        """
        raise NotImplementedError("Must be implemented by subclass")

    def export(
        self,
        refs: Iterable[DatasetRef],
        *,
        directory: ResourcePathExpression | None = None,
        transfer: str | None = "auto",
    ) -> Iterable[FileDataset]:
        """Export datasets for transfer to another data repository.

        Parameters
        ----------
        refs : iterable of `DatasetRef`
            Dataset references to be exported.
        directory : `~lsst.resources.ResourcePathExpression`, optional
            Path to a directory that should contain files corresponding to
            output datasets. Ignored if ``transfer`` is explicitly `None`.
        transfer : `str`, optional
            Mode that should be used to move datasets out of the repository.
            Valid options are the same as those of the ``transfer`` argument
            to ``ingest``, and datastores may similarly signal that a
            transfer mode is not supported by raising `NotImplementedError`.
            If "auto" is given and no ``directory`` is specified, `None`
            will be implied.

        Returns
        -------
        datasets : iterable of `FileDataset`
            Structs containing information about the exported datasets, in
            the same order as ``refs``.

        Raises
        ------
        NotImplementedError
            Raised if the given transfer mode is not supported.
        """
        raise NotImplementedError(f"Transfer mode {transfer} not supported.")

    @abstractmethod
    def validateConfiguration(
        self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False
    ) -> None:
        """Validate some of the configuration for this datastore.

        Parameters
        ----------
        entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass`
            Entities to test against this configuration. Can be differing
            types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a validation problem with a configuration.

        Notes
        -----
        Which parts of the configuration are validated is at the discretion
        of each Datastore implementation.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None:
        """Validate a specific look up key with supplied entity.

        Parameters
        ----------
        lookupKey : `LookupKey`
            Key to use to retrieve information from the datastore
            configuration.
        entity : `DatasetRef`, `DatasetType`, or `StorageClass`
            Entity to compare with configuration retrieved using the
            specified lookup key.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a problem with the combination of entity
            and lookup key.

        Notes
        -----
        Bypasses the normal selection priorities by allowing a key that
        would normally not be selected to be validated.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def getLookupKeys(self) -> set[LookupKey]:
        """Return all the lookup keys relevant to this datastore.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys stored internally for looking up information based
            on `DatasetType` name or `StorageClass`.
        """
        raise NotImplementedError("Must be implemented by subclass")

    def needs_expanded_data_ids(
        self,
        transfer: str | None,
        entity: DatasetRef | DatasetType | StorageClass | None = None,
    ) -> bool:
        """Test whether this datastore needs expanded data IDs to ingest.

        Parameters
        ----------
        transfer : `str` or `None`
            Transfer mode for ingest.
        entity : `DatasetRef` or `DatasetType` or `StorageClass` or `None`, \
                optional
            Object representing what will be ingested. If not provided (or
            not specific enough), `True` may be returned even if expanded
            data IDs aren't necessary.

        Returns
        -------
        needed : `bool`
            If `True`, expanded data IDs may be needed. `False` only if
            expansion definitely isn't necessary.
        """
        return True

    @abstractmethod
    def import_records(
        self,
        data: Mapping[str, DatastoreRecordData],
    ) -> None:
        """Import datastore location and record data from an in-memory data
        structure.

        Parameters
        ----------
        data : `~collections.abc.Mapping` [ `str`, `DatastoreRecordData` ]
            Datastore records indexed by datastore name. May contain data
            for other `Datastore` instances (generally because they are
            chained to this one), which should be ignored.

        Notes
        -----
        Implementations should generally not check that any external
        resources (e.g. files) referred to by these records actually exist,
        for performance reasons; we expect higher-level code to guarantee
        that they do.

        Implementations are responsible for calling
        `DatastoreRegistryBridge.insert` on all datasets in
        ``data.locations`` where the key is in `names`, as well as loading
        any opaque table data.

        Implementations may assume that datasets are either fully present or
        not at all (single-component exports are not permitted).
        """
        raise NotImplementedError()

    @abstractmethod
    def export_records(
        self,
        refs: Iterable[DatasetIdRef],
    ) -> Mapping[str, DatastoreRecordData]:
        """Export datastore records and locations to an in-memory data
        structure.

        Parameters
        ----------
        refs : `~collections.abc.Iterable` [ `DatasetIdRef` ]
            Datasets to save. This may include datasets not known to this
            datastore, which should be ignored. May not include component
            datasets.

        Returns
        -------
        data : `~collections.abc.Mapping` [ `str`, `DatastoreRecordData` ]
            Exported datastore records indexed by datastore name.
        """
        raise NotImplementedError()

    def set_retrieve_dataset_type_method(self, method: Callable[[str], DatasetType | None] | None) -> None:
        """Specify a method that can be used by the datastore to retrieve
        registry-defined dataset types.

        Parameters
        ----------
        method : `~collections.abc.Callable` | `None`
            Method that takes the name of a dataset type and returns the
            corresponding `DatasetType` instance as defined in Registry. If
            the dataset type name is not known to the registry, `None` is
            returned.

        Notes
        -----
        This method is only needed for a Datastore supporting a "trusted"
        mode, when it does not have access to datastore records and needs to
        guess dataset location based on its stored dataset type.
        """
        pass

    @abstractmethod
    def get_opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]:
        """Make definitions of the opaque tables used by this Datastore.

        Returns
        -------
        tables : `~collections.abc.Mapping` [ `str`, `DatastoreOpaqueTable` ]
            Mapping of opaque table names to their definitions. This can be
            an empty mapping if Datastore does not use opaque tables to keep
            datastore records.
        """
        raise NotImplementedError()


class NullDatastore(Datastore):
    """A datastore that implements the `Datastore` API but fails on any
    request to store or retrieve data.

    Parameters
    ----------
    config : `Config` or `~lsst.resources.ResourcePathExpression` or `None`
        Ignored.
    bridgeManager : `DatastoreRegistryBridgeManager` or `None`
        Ignored.
    butlerRoot : `~lsst.resources.ResourcePathExpression` or `None`
        Ignored.
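
    Examples
    --------
    A small runnable sketch; the no-op query methods work, while anything
    touching data raises:

    >>> null = NullDatastore(None, None)
    >>> null.knows_these([])
    {}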
1381 """
1383 @classmethod
1384 def _create_from_config(
1385 cls,
1386 config: Config,
1387 bridgeManager: DatastoreRegistryBridgeManager,
1388 butlerRoot: ResourcePathExpression | None = None,
1389 ) -> NullDatastore:
1390 return NullDatastore(config, bridgeManager, butlerRoot)
1392 def clone(self, bridgeManager: DatastoreRegistryBridgeManager) -> Datastore:
1393 return self
1395 @classmethod
1396 def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
1397 # Nothing to do. This is not a real Datastore.
1398 pass
1400 def __init__(
1401 self,
1402 config: Config | ResourcePathExpression | None,
1403 bridgeManager: DatastoreRegistryBridgeManager | None,
1404 butlerRoot: ResourcePathExpression | None = None,
1405 ):
1406 # Name ourselves with the timestamp the datastore
1407 # was created.
1408 self.name = f"{type(self).__name__}@{time.time()}"
1409 _LOG.debug("Creating datastore %s", self.name)
1411 return

    def knows(self, ref: DatasetRef) -> bool:
        return False

    def exists(self, datasetRef: DatasetRef) -> bool:
        return False

    def get(
        self,
        datasetRef: DatasetRef,
        parameters: Mapping[str, Any] | None = None,
        storageClass: StorageClass | str | None = None,
    ) -> Any:
        raise FileNotFoundError("This is a no-op datastore that can not access a real datastore")

    def put(self, inMemoryDataset: Any, datasetRef: DatasetRef) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def put_new(self, in_memory_dataset: Any, ref: DatasetRef) -> Mapping[str, DatasetRef]:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def ingest(
        self, *datasets: FileDataset, transfer: str | None = None, record_validation_info: bool = True
    ) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def transfer_from(
        self,
        source_datastore: Datastore,
        refs: Iterable[DatasetRef],
        transfer: str = "auto",
        artifact_existence: dict[ResourcePath, bool] | None = None,
        dry_run: bool = False,
    ) -> tuple[set[DatasetRef], set[DatasetRef]]:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def getURIs(self, datasetRef: DatasetRef, predict: bool = False) -> DatasetRefURIs:
        raise FileNotFoundError("This is a no-op datastore that can not access a real datastore")

    def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ResourcePath:
        raise FileNotFoundError("This is a no-op datastore that can not access a real datastore")

    def retrieveArtifacts(
        self,
        refs: Iterable[DatasetRef],
        destination: ResourcePath,
        transfer: str = "auto",
        preserve_path: bool = True,
        overwrite: bool = False,
    ) -> list[ResourcePath]:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def remove(self, datasetRef: DatasetRef) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def forget(self, refs: Iterable[DatasetRef]) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def trash(self, ref: DatasetRef | Iterable[DatasetRef], ignore_errors: bool = True) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def emptyTrash(self, ignore_errors: bool = True) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def transfer(self, inputDatastore: Datastore, datasetRef: DatasetRef) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def export(
        self,
        refs: Iterable[DatasetRef],
        *,
        directory: ResourcePathExpression | None = None,
        transfer: str | None = "auto",
    ) -> Iterable[FileDataset]:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def validateConfiguration(
        self, entities: Iterable[DatasetRef | DatasetType | StorageClass], logFailures: bool = False
    ) -> None:
        # No configuration so always validates.
        pass

    def validateKey(self, lookupKey: LookupKey, entity: DatasetRef | DatasetType | StorageClass) -> None:
        pass

    def getLookupKeys(self) -> set[LookupKey]:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def import_records(
        self,
        data: Mapping[str, DatastoreRecordData],
    ) -> None:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def export_records(
        self,
        refs: Iterable[DatasetIdRef],
    ) -> Mapping[str, DatastoreRecordData]:
        raise NotImplementedError("This is a no-op datastore that can not access a real datastore")

    def get_opaque_table_definitions(self) -> Mapping[str, DatastoreOpaqueTable]:
        return {}