Coverage for python/lsst/daf/butler/registry/bridge/ephemeral.py: 32%
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ("EphemeralDatastoreRegistryBridge",)

from contextlib import contextmanager
from typing import TYPE_CHECKING, Iterable, Iterator, Optional, Set, Tuple, Type

from lsst.daf.butler import DatasetId
from lsst.daf.butler.registry.interfaces import (
    DatasetIdRef,
    DatastoreRegistryBridge,
    FakeDatasetRef,
    OpaqueTableStorage,
)

if TYPE_CHECKING:
    from lsst.daf.butler import StoredDatastoreItemInfo


class EphemeralDatastoreRegistryBridge(DatastoreRegistryBridge):
    """An implementation of `DatastoreRegistryBridge` for ephemeral datastores
    - those whose artifacts never outlive the current process.

    Parameters
    ----------
    datastoreName : `str`
        Name of the `Datastore` as it should appear in `Registry` tables
        referencing it.

    Notes
    -----
    The current implementation just uses a Python set to remember the dataset
    IDs associated with the datastore. This will probably need to be converted
    to use in-database temporary tables instead in the future to support
    "in-datastore" constraints in `Registry.queryDatasets`.
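
    Examples
    --------
    A minimal, illustrative sketch of the bookkeeping (the datastore name and
    the integer dataset ID below are placeholders, not values tied to any
    particular datastore):

    >>> from lsst.daf.butler.registry.interfaces import FakeDatasetRef
    >>> bridge = EphemeralDatastoreRegistryBridge("inMemoryDatastore")
    >>> bridge.insert([FakeDatasetRef(1)])
    >>> FakeDatasetRef(1) in bridge
    True
    >>> bridge.forget([FakeDatasetRef(1)])
    >>> FakeDatasetRef(1) in bridge
    False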
    """

    def __init__(self, datastoreName: str):
        super().__init__(datastoreName)
        # Dataset IDs currently stored in the (ephemeral) datastore.
        self._datasetIds: Set[DatasetId] = set()
        # Dataset IDs that have been moved to the trash but not yet emptied.
        self._trashedIds: Set[DatasetId] = set()

    def insert(self, refs: Iterable[DatasetIdRef]) -> None:
        # Docstring inherited from DatastoreRegistryBridge
        self._datasetIds.update(ref.getCheckedId() for ref in refs)

    def forget(self, refs: Iterable[DatasetIdRef]) -> None:
        # Docstring inherited from DatastoreRegistryBridge
        self._datasetIds.difference_update(ref.id for ref in refs)

    def moveToTrash(self, refs: Iterable[DatasetIdRef]) -> None:
        # Docstring inherited from DatastoreRegistryBridge
        self._trashedIds.update(ref.getCheckedId() for ref in refs)

    def check(self, refs: Iterable[DatasetIdRef]) -> Iterable[DatasetIdRef]:
        # Docstring inherited from DatastoreRegistryBridge
        yield from (ref for ref in refs if ref in self)

    def __contains__(self, ref: DatasetIdRef) -> bool:
        return ref.getCheckedId() in self._datasetIds and ref.getCheckedId() not in self._trashedIds

    @contextmanager
    def emptyTrash(
        self,
        records_table: Optional[OpaqueTableStorage] = None,
        record_class: Optional[Type[StoredDatastoreItemInfo]] = None,
        record_column: Optional[str] = None,
    ) -> Iterator[
        Tuple[Iterable[Tuple[DatasetIdRef, Optional[StoredDatastoreItemInfo]]], Optional[Set[str]]]
    ]:
        # Docstring inherited from DatastoreRegistryBridge
        matches: Iterable[Tuple[FakeDatasetRef, Optional[StoredDatastoreItemInfo]]] = ()
        if isinstance(records_table, OpaqueTableStorage):
            if record_class is None:
                raise ValueError("Record class must be provided if records table is given.")
            matches = (
                (FakeDatasetRef(id), record_class.from_record(record))
                for id in self._trashedIds
                for record in records_table.fetch(dataset_id=id)
            )
        else:
            matches = ((FakeDatasetRef(id), None) for id in self._trashedIds)

        # Indicate to the caller that we do not know about artifacts that
        # should be retained.
        yield (matches, None)

        if isinstance(records_table, OpaqueTableStorage):
            # Remove the opaque records associated with the trashed datasets.
            records_table.delete(["dataset_id"], *[{"dataset_id": id} for id in self._trashedIds])

        # Forget the trashed datasets and empty the in-memory trash.
        self._datasetIds.difference_update(self._trashedIds)
        self._trashedIds = set()
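

# A minimal usage sketch of the trash workflow this bridge implements
# (illustrative only; the datastore name and integer dataset IDs are
# placeholders, and a real ephemeral datastore would delete its in-memory
# artifacts where noted):
if __name__ == "__main__":
    bridge = EphemeralDatastoreRegistryBridge("inMemoryDatastore")
    refs = [FakeDatasetRef(1), FakeDatasetRef(2)]
    bridge.insert(refs)

    # Trashed datasets immediately stop reporting as present.
    bridge.moveToTrash(refs[:1])
    assert refs[0] not in bridge and refs[1] in bridge

    # Emptying the trash yields the trashed refs (with no opaque records
    # here) so a datastore could remove the matching artifacts; the
    # bookkeeping is cleared when the context exits.
    with bridge.emptyTrash() as (matches, _preserved):
        for _ref, _record in matches:
            pass  # an ephemeral datastore would drop its artifact here
    assert refs[0] not in bridge and refs[1] in bridge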