# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""
Support for generic data stores.
"""

from __future__ import annotations

__all__ = ("DatastoreConfig", "Datastore", "DatastoreValidationError")

import contextlib
import logging
from collections import defaultdict
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    ClassVar,
    Dict,
    Iterable,
    Iterator,
    List,
    Mapping,
    Optional,
    Set,
    Tuple,
    Type,
    Union,
)

from dataclasses import dataclass
from abc import ABCMeta, abstractmethod

from lsst.utils import doImport
from .config import ConfigSubset, Config
from .exceptions import ValidationError, DatasetTypeNotSupportedError
from .constraints import Constraints
from .storageClass import StorageClassFactory

if TYPE_CHECKING:
    from ..registry.interfaces import DatastoreRegistryBridgeManager
    from .datasets import DatasetRef, DatasetType
    from .configSupport import LookupKey
    from .repoTransfers import FileDataset
    from .storageClass import StorageClass
    from .location import ButlerURI


class DatastoreConfig(ConfigSubset):
    component = "datastore"
    requiredKeys = ("cls",)
    defaultConfigFile = "datastore.yaml"


class DatastoreValidationError(ValidationError):
    """There is a problem with the Datastore configuration.
    """
    pass


@dataclass(frozen=True)
class Event:
    __slots__ = {"name", "undoFunc", "args", "kwargs"}
    name: str
    undoFunc: Callable
    args: tuple
    kwargs: dict


class IngestPrepData:
    """A helper base class for `Datastore` ingest implementations.

    Datastore implementations will generally need a custom implementation of
    this class.

    Should be accessed as ``Datastore.IngestPrepData`` instead of via direct
    import.

    Parameters
    ----------
    refs : iterable of `DatasetRef`
        References for the datasets that can be ingested by this datastore.
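
    Examples
    --------
    A minimal sketch of a datastore-specific subclass carrying extra
    per-file state; the ``fileInfo`` attribute is hypothetical and used
    purely for illustration::

        class MyIngestPrepData(Datastore.IngestPrepData):
            def __init__(self, refs, fileInfo):
                super().__init__(refs)
                self.fileInfo = fileInfo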
102 """
103 def __init__(self, refs: Iterable[DatasetRef]):
104 self.refs = {ref.id: ref for ref in refs}


class DatastoreTransaction:
    """Keeps a log of `Datastore` activity and allows rollback.

    Parameters
    ----------
    parent : `DatastoreTransaction`, optional
        The parent transaction (if any).
    """
    Event: ClassVar[Type] = Event

    parent: Optional['DatastoreTransaction']
    """The parent transaction. (`DatastoreTransaction`, optional)"""

    def __init__(self, parent: Optional[DatastoreTransaction] = None):
        self.parent = parent
        self._log: List[Event] = []

    def registerUndo(self, name: str, undoFunc: Callable, *args: Any, **kwargs: Any) -> None:
        """Register event with undo function.

        Parameters
        ----------
        name : `str`
            Name of the event.
        undoFunc : func
            Function to undo this event.
        args : `tuple`
            Positional arguments to `undoFunc`.
        kwargs : `dict`
            Keyword arguments to `undoFunc`.
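
        Examples
        --------
        A minimal sketch; ``removeFile`` stands in for a hypothetical
        cleanup function and is not part of this module::

            txn = DatastoreTransaction()
            txn.registerUndo("write", removeFile, "/tmp/example.fits")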
137 """
138 self._log.append(self.Event(name, undoFunc, args, kwargs))

    @contextlib.contextmanager
    def undoWith(self, name: str, undoFunc: Callable, *args: Any, **kwargs: Any) -> Iterator[None]:
        """A context manager that calls `registerUndo` if the nested operation
        does not raise an exception.

        This can be used to wrap individual undo-able statements within a
        DatastoreTransaction block. Multiple statements that can fail
        separately should not be part of the same `undoWith` block.

        All arguments are forwarded directly to `registerUndo`.
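
        Examples
        --------
        A sketch wrapping a single undo-able statement; ``writeFile``,
        ``removeFile``, and ``path`` are hypothetical and used only for
        illustration::

            with transaction.undoWith("write", removeFile, path):
                writeFile(path)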
150 """
151 try:
152 yield None
153 except BaseException:
154 raise
155 else:
156 self.registerUndo(name, undoFunc, *args, **kwargs)

    def rollback(self) -> None:
        """Roll back all events in this transaction.
        """
        while self._log:
            ev = self._log.pop()
            try:
                ev.undoFunc(*ev.args, **ev.kwargs)
            except BaseException as e:
                # Deliberately swallow any error that may occur in unrolling
                log = logging.getLogger(__name__)
                log.warning("Exception: %s caught while unrolling: %s", e, ev.name)

    def commit(self) -> None:
        """Commit this transaction.
        """
        if self.parent is None:
            # Just forget about the events, they have already happened.
            return
        else:
            # The events from this transaction may still need to be
            # rolled back as part of the parent transaction.
            self.parent._log.extend(self._log)


class Datastore(metaclass=ABCMeta):
    """Datastore interface.

    Parameters
    ----------
    config : `DatastoreConfig` or `str`
        Load configuration either from an existing config instance or by
        referring to a configuration file.
    bridgeManager : `DatastoreRegistryBridgeManager`
        Object that manages the interface between `Registry` and datastores.
    butlerRoot : `str`, optional
        New datastore root to use to override the configuration value.
    """

    defaultConfigFile: ClassVar[Optional[str]] = None
    """Path to configuration defaults. Relative to $DAF_BUTLER_DIR/config or
    absolute path. Can be None if no defaults specified.
    """

    containerKey: ClassVar[Optional[str]] = None
    """Name of the key containing a list of subconfigurations that also
    need to be merged with defaults and will likely use different Python
    datastore classes (but all using DatastoreConfig). Assumed to be a
    list of configurations that can be represented in a DatastoreConfig
    and containing a "cls" definition. None indicates that no containers
    are expected in this Datastore."""

    isEphemeral: bool = False
    """Indicate whether this Datastore is ephemeral or not. An ephemeral
    datastore is one where the contents of the datastore will not exist
    across process restarts. This value can change per-instance."""

    config: DatastoreConfig
    """Configuration used to create Datastore."""

    name: str
    """Label associated with this Datastore."""

    storageClassFactory: StorageClassFactory
    """Factory for creating storage class instances from name."""

    constraints: Constraints
    """Constraints to apply when putting datasets into the datastore."""

    IngestPrepData: ClassVar = IngestPrepData
    """Helper base class for ingest implementations.
    """

    @classmethod
    @abstractmethod
    def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None:
        """Set any filesystem-dependent config options for this Datastore to
        be appropriate for a new empty repository with the given root.

        Parameters
        ----------
        root : `str`
            Filesystem path to the root of the data repository.
        config : `Config`
            A `Config` to update. Only the subset understood by
            this component will be updated. Will not expand
            defaults.
        full : `Config`
            A complete config with all defaults expanded that can be
            converted to a `DatastoreConfig`. Read-only and will not be
            modified by this method.
            Repository-specific options that should not be obtained
            from defaults when Butler instances are constructed
            should be copied from ``full`` to ``config``.
        overwrite : `bool`, optional
            If `False`, do not modify a value in ``config`` if the value
            already exists. Default is always to overwrite with the provided
            ``root``.

        Notes
        -----
        If a keyword is explicitly defined in the supplied ``config`` it
        will not be overridden by this method if ``overwrite`` is `False`.
        This allows explicit values set in external configs to be retained.
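
        Examples
        --------
        A sketch of a typical override for a file-based datastore,
        assuming the subclass stores its root under a ``root`` key; the
        key name is illustrative only::

            @classmethod
            def setConfigRoot(cls, root, config, full, overwrite=True):
                Config.updateParameters(DatastoreConfig, config, full,
                                        toUpdate={"root": root},
                                        overwrite=overwrite)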
262 """
263 raise NotImplementedError()

    @staticmethod
    def fromConfig(config: Config, bridgeManager: DatastoreRegistryBridgeManager,
                   butlerRoot: Optional[str] = None) -> 'Datastore':
        """Create datastore from type specified in config file.

        Parameters
        ----------
        config : `Config`
            Configuration instance.
        bridgeManager : `DatastoreRegistryBridgeManager`
            Object that manages the interface between `Registry` and
            datastores.
        butlerRoot : `str`, optional
            Butler root directory.
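
        Examples
        --------
        A sketch, assuming ``"datastore.yaml"`` names a configuration
        file containing a ``datastore.cls`` entry and ``bridgeManager``
        has been constructed elsewhere::

            config = Config("datastore.yaml")
            datastore = Datastore.fromConfig(config, bridgeManager)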
279 """
280 cls = doImport(config["datastore", "cls"])
281 return cls(config=config, bridgeManager=bridgeManager, butlerRoot=butlerRoot)

    def __init__(self, config: Union[Config, str],
                 bridgeManager: DatastoreRegistryBridgeManager, butlerRoot: Optional[str] = None):
        self.config = DatastoreConfig(config)
        self.name = "ABCDataStore"
        self._transaction: Optional[DatastoreTransaction] = None

        # All Datastores need storage classes and constraints
        self.storageClassFactory = StorageClassFactory()

        # And read the constraints list
        constraintsConfig = self.config.get("constraints")
        self.constraints = Constraints(constraintsConfig, universe=bridgeManager.universe)

    def __str__(self) -> str:
        return self.name

    def __repr__(self) -> str:
        return self.name

    @property
    def names(self) -> Tuple[str, ...]:
        """Names associated with this datastore returned as a tuple.

        Can be different from ``name`` for a chaining datastore.
        """
        # Default implementation returns solely the name itself
        return (self.name, )

    @contextlib.contextmanager
    def transaction(self) -> Iterator[DatastoreTransaction]:
        """Context manager supporting `Datastore` transactions.

        Transactions can be nested, and are to be used in combination with
        `Registry.transaction`.
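
        Examples
        --------
        A sketch of typical usage; ``datastore``, ``inMemoryDataset``,
        and ``ref`` are assumed to exist. If the block raises, the put
        is rolled back::

            with datastore.transaction():
                datastore.put(inMemoryDataset, ref)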
317 """
318 self._transaction = DatastoreTransaction(self._transaction)
319 try:
320 yield self._transaction
321 except BaseException:
322 self._transaction.rollback()
323 raise
324 else:
325 self._transaction.commit()
326 self._transaction = self._transaction.parent

    @abstractmethod
    def exists(self, datasetRef: DatasetRef) -> bool:
        """Check if the dataset exists in the datastore.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required dataset.

        Returns
        -------
        exists : `bool`
            `True` if the entity exists in the `Datastore`.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def get(self, datasetRef: DatasetRef, parameters: Optional[Mapping[str, Any]] = None) -> Any:
        """Load an `InMemoryDataset` from the store.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required Dataset.
        parameters : `dict`
            `StorageClass`-specific parameters that specify a slice of the
            Dataset to be loaded.

        Returns
        -------
        inMemoryDataset : `object`
            Requested Dataset or slice thereof as an InMemoryDataset.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def put(self, inMemoryDataset: Any, datasetRef: DatasetRef) -> None:
        """Write an `InMemoryDataset` with a given `DatasetRef` to the store.

        Parameters
        ----------
        inMemoryDataset : `object`
            The Dataset to store.
        datasetRef : `DatasetRef`
            Reference to the associated Dataset.
        """
        raise NotImplementedError("Must be implemented by subclass")

    def _overrideTransferMode(self, *datasets: FileDataset, transfer: Optional[str] = None) -> Optional[str]:
        """Allow ingest transfer mode to be defaulted based on datasets.

        Parameters
        ----------
        datasets : `FileDataset`
            Each positional argument is a struct containing information about
            a file to be ingested, including its path (either absolute or
            relative to the datastore root, if applicable), a complete
            `DatasetRef` (with ``dataset_id not None``), and optionally a
            formatter class or its fully-qualified string name. If a formatter
            is not provided, this method should populate that attribute with
            the formatter the datastore would use for `put`. Subclasses are
            also permitted to modify the path attribute (typically to put it
            in what the datastore considers its standard form).
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            See `ingest` for details of transfer modes.

        Returns
        -------
        newTransfer : `str`
            Transfer mode to use. Will be identical to the supplied transfer
            mode unless "auto" is used.
        """
        if transfer != "auto":
            return transfer
        raise RuntimeError(f"{transfer} is not allowed without specialization.")

    def _prepIngest(self, *datasets: FileDataset, transfer: Optional[str] = None) -> IngestPrepData:
        """Process datasets to identify which ones can be ingested into this
        Datastore.

        Parameters
        ----------
        datasets : `FileDataset`
            Each positional argument is a struct containing information about
            a file to be ingested, including its path (either absolute or
            relative to the datastore root, if applicable), a complete
            `DatasetRef` (with ``dataset_id not None``), and optionally a
            formatter class or its fully-qualified string name. If a formatter
            is not provided, this method should populate that attribute with
            the formatter the datastore would use for `put`. Subclasses are
            also permitted to modify the path attribute (typically to put it
            in what the datastore considers its standard form).
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            See `ingest` for details of transfer modes.

        Returns
        -------
        data : `IngestPrepData`
            An instance of a subclass of `IngestPrepData`, used to pass
            arbitrary data from `_prepIngest` to `_finishIngest`. This should
            include only the datasets this datastore can actually ingest;
            others should be silently ignored (`Datastore.ingest` will inspect
            `IngestPrepData.refs` and raise `DatasetTypeNotSupportedError` if
            necessary).

        Raises
        ------
        NotImplementedError
            Raised if the datastore does not support the given transfer mode
            (including the case where ingest is not supported at all).
        FileNotFoundError
            Raised if one of the given files does not exist.
        FileExistsError
            Raised if transfer is not `None` but the (internal) location the
            file would be moved to is already occupied.

        Notes
        -----
        This method (along with `_finishIngest`) should be implemented by
        subclasses to provide ingest support instead of implementing `ingest`
        directly.

        `_prepIngest` should not modify the data repository or given files in
        any way; all changes should be deferred to `_finishIngest`.

        When possible, exceptions should be raised in `_prepIngest` instead of
        `_finishIngest`. `NotImplementedError` exceptions that indicate that
        the transfer mode is not supported must be raised by `_prepIngest`
        instead of `_finishIngest`.
        """
        raise NotImplementedError(
            "Datastore does not support direct file-based ingest."
        )

    def _finishIngest(self, prepData: IngestPrepData, *, transfer: Optional[str] = None) -> None:
        """Complete an ingest operation.

        Parameters
        ----------
        prepData : `IngestPrepData`
            An instance of a subclass of `IngestPrepData`. Guaranteed to be
            the direct result of a call to `_prepIngest` on this datastore.
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            See `ingest` for details of transfer modes.

        Raises
        ------
        FileNotFoundError
            Raised if one of the given files does not exist.
        FileExistsError
            Raised if transfer is not `None` but the (internal) location the
            file would be moved to is already occupied.

        Notes
        -----
        This method (along with `_prepIngest`) should be implemented by
        subclasses to provide ingest support instead of implementing `ingest`
        directly.
        """
        raise NotImplementedError(
            "Datastore does not support direct file-based ingest."
        )

    def ingest(self, *datasets: FileDataset, transfer: Optional[str] = None) -> None:
        """Ingest one or more files into the datastore.

        Parameters
        ----------
        datasets : `FileDataset`
            Each positional argument is a struct containing information about
            a file to be ingested, including its path (either absolute or
            relative to the datastore root, if applicable), a complete
            `DatasetRef` (with ``dataset_id not None``), and optionally a
            formatter class or its fully-qualified string name. If a formatter
            is not provided, the one the datastore would use for ``put`` on
            that dataset is assumed.
        transfer : `str`, optional
            How (and whether) the dataset should be added to the datastore.
            If `None` (default), the file must already be in a location
            appropriate for the datastore (e.g. within its root directory),
            and will not be modified. Other choices include "move", "copy",
            "link", "symlink", "relsymlink", and "hardlink". "link" is a
            special transfer mode that will first try to make a hardlink and
            if that fails a symlink will be used instead. "relsymlink" creates
            a relative symlink rather than using an absolute path.
            Most datastores do not support all transfer modes.
            "auto" is a special option that will let the datastore choose the
            most natural option for itself.

        Raises
        ------
        NotImplementedError
            Raised if the datastore does not support the given transfer mode
            (including the case where ingest is not supported at all).
        DatasetTypeNotSupportedError
            Raised if one or more files to be ingested have a dataset type
            that is not supported by the datastore.
        FileNotFoundError
            Raised if one of the given files does not exist.
        FileExistsError
            Raised if transfer is not `None` but the (internal) location the
            file would be moved to is already occupied.

        Notes
        -----
        Subclasses should implement `_prepIngest` and `_finishIngest` instead
        of implementing `ingest` directly. Datastores that hold and
        delegate to child datastores may want to call those methods as well.

        Subclasses are encouraged to document their supported transfer modes
        in their class documentation.
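
        Examples
        --------
        A sketch ingesting one existing file by copying it into the
        datastore; assumes ``ref`` is a resolved `DatasetRef` and that
        this datastore supports the "copy" transfer mode::

            from lsst.daf.butler import FileDataset

            datastore.ingest(FileDataset(path="data/raw.fits", refs=[ref]),
                             transfer="copy")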
542 """
543 # Allow a datastore to select a default transfer mode
544 transfer = self._overrideTransferMode(*datasets, transfer=transfer)
545 prepData = self._prepIngest(*datasets, transfer=transfer)
546 refs = {ref.id: ref for dataset in datasets for ref in dataset.refs}
547 if refs.keys() != prepData.refs.keys():
548 unsupported = refs.keys() - prepData.refs.keys()
549 # Group unsupported refs by DatasetType for an informative
550 # but still concise error message.
551 byDatasetType = defaultdict(list)
552 for datasetId in unsupported:
553 ref = refs[datasetId]
554 byDatasetType[ref.datasetType].append(ref)
555 raise DatasetTypeNotSupportedError(
556 "DatasetType(s) not supported in ingest: "
557 + ", ".join(f"{k.name} ({len(v)} dataset(s))" for k, v in byDatasetType.items())
558 )
559 self._finishIngest(prepData, transfer=transfer)

    @abstractmethod
    def getURIs(self, datasetRef: DatasetRef,
                predict: bool = False) -> Tuple[Optional[ButlerURI], Dict[str, ButlerURI]]:
        """Return URIs associated with dataset.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required dataset.
        predict : `bool`, optional
            If the datastore does not know about the dataset, should it
            return a predicted URI or not?

        Returns
        -------
        primary : `ButlerURI`
            The URI to the primary artifact associated with this dataset.
            If the dataset was disassembled within the datastore this
            may be `None`.
        components : `dict`
            URIs to any components associated with the dataset artifact.
            Can be empty if there are no components.
        """
        raise NotImplementedError()

    @abstractmethod
    def getURI(self, datasetRef: DatasetRef, predict: bool = False) -> ButlerURI:
        """URI to the Dataset.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required Dataset.
        predict : `bool`
            If `True` attempt to predict the URI for a dataset if it does
            not exist in datastore.

        Returns
        -------
        uri : `ButlerURI`
            URI pointing to the Dataset within the datastore. If the
            Dataset does not exist in the datastore, the URI may be a guess.
            If the datastore does not have entities that relate well
            to the concept of a URI the returned URI will be
            descriptive. The returned URI is not guaranteed to be obtainable.

        Raises
        ------
        FileNotFoundError
            Raised if a URI has been requested for a dataset that does not
            exist and guessing is not allowed.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def remove(self, datasetRef: DatasetRef) -> None:
        """Indicate to the Datastore that a Dataset can be removed.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required Dataset.

        Raises
        ------
        FileNotFoundError
            When Dataset does not exist.

        Notes
        -----
        Some Datastores may implement this method as a silent no-op to
        disable Dataset deletion through standard interfaces.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def trash(self, datasetRef: DatasetRef, ignore_errors: bool = True) -> None:
        """Indicate to the Datastore that a Dataset can be moved to the trash.

        Parameters
        ----------
        datasetRef : `DatasetRef`
            Reference to the required Dataset.
        ignore_errors : `bool`, optional
            Determine whether errors should be ignored.

        Raises
        ------
        FileNotFoundError
            When Dataset does not exist.

        Notes
        -----
        Some Datastores may implement this method as a silent no-op to
        disable Dataset deletion through standard interfaces.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def emptyTrash(self, ignore_errors: bool = True) -> None:
        """Remove all datasets from the trash.

        Parameters
        ----------
        ignore_errors : `bool`, optional
            Determine whether errors should be ignored.

        Notes
        -----
        Some Datastores may implement this method as a silent no-op to
        disable Dataset deletion through standard interfaces.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def transfer(self, inputDatastore: Datastore, datasetRef: DatasetRef) -> None:
        """Retrieve a Dataset from an input `Datastore`, and store the result
        in this `Datastore`.

        Parameters
        ----------
        inputDatastore : `Datastore`
            The external `Datastore` from which to retrieve the Dataset.
        datasetRef : `DatasetRef`
            Reference to the required Dataset.
        """
        raise NotImplementedError("Must be implemented by subclass")

    def export(self, refs: Iterable[DatasetRef], *,
               directory: Optional[str] = None, transfer: Optional[str] = None) -> Iterable[FileDataset]:
        """Export datasets for transfer to another data repository.

        Parameters
        ----------
        refs : iterable of `DatasetRef`
            Dataset references to be exported.
        directory : `str`, optional
            Path to a directory that should contain files corresponding to
            output datasets. Ignored if ``transfer`` is `None`.
        transfer : `str`, optional
            Mode that should be used to move datasets out of the repository.
            Valid options are the same as those of the ``transfer`` argument
            to ``ingest``, and datastores may similarly signal that a transfer
            mode is not supported by raising `NotImplementedError`.

        Returns
        -------
        datasets : iterable of `FileDataset`
            Structs containing information about the exported datasets, in the
            same order as ``refs``.

        Raises
        ------
        NotImplementedError
            Raised if the given transfer mode is not supported.
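
        Examples
        --------
        A sketch exporting previously-stored datasets into a staging
        directory; assumes this datastore supports the "copy" transfer
        mode and ``refs`` is an iterable of known `DatasetRef` objects::

            exported = list(datastore.export(refs, directory="/tmp/staging",
                                             transfer="copy"))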
716 """
717 raise NotImplementedError(f"Transfer mode {transfer} not supported.")

    @abstractmethod
    def validateConfiguration(self, entities: Iterable[Union[DatasetRef, DatasetType, StorageClass]],
                              logFailures: bool = False) -> None:
        """Validate some of the configuration for this datastore.

        Parameters
        ----------
        entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass`
            Entities to test against this configuration. Can be differing
            types.
        logFailures : `bool`, optional
            If `True`, output a log message for every validation error
            detected.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a validation problem with a configuration.

        Notes
        -----
        Which parts of the configuration are validated is at the discretion
        of each Datastore implementation.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def validateKey(self,
                    lookupKey: LookupKey, entity: Union[DatasetRef, DatasetType, StorageClass]) -> None:
        """Validate a specific lookup key with the supplied entity.

        Parameters
        ----------
        lookupKey : `LookupKey`
            Key to use to retrieve information from the datastore
            configuration.
        entity : `DatasetRef`, `DatasetType`, or `StorageClass`
            Entity to compare with configuration retrieved using the
            specified lookup key.

        Raises
        ------
        DatastoreValidationError
            Raised if there is a problem with the combination of entity
            and lookup key.

        Notes
        -----
        Bypasses the normal selection priorities by allowing a key that
        would normally not be selected to be validated.
        """
        raise NotImplementedError("Must be implemented by subclass")

    @abstractmethod
    def getLookupKeys(self) -> Set[LookupKey]:
        """Return all the lookup keys relevant to this datastore.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys stored internally for looking up information based
            on `DatasetType` name or `StorageClass`.
        """
        raise NotImplementedError("Must be implemented by subclass")