Coverage for python/lsst/daf/butler/registry/bridge/ephemeral.py: 29%

36 statements  

coverage.py v7.2.5, created at 2023-05-02 18:18 -0700

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ("EphemeralDatastoreRegistryBridge",)

from contextlib import contextmanager
from typing import TYPE_CHECKING, Iterable, Iterator, Optional, Set, Tuple, Type

from ...core import DatasetId
from ..interfaces import DatasetIdRef, DatastoreRegistryBridge, FakeDatasetRef, OpaqueTableStorage

if TYPE_CHECKING:
    from ...core import StoredDatastoreItemInfo


class EphemeralDatastoreRegistryBridge(DatastoreRegistryBridge):
    """An implementation of `DatastoreRegistryBridge` for ephemeral datastores,
    i.e. those whose artifacts never outlive the current process.

    Parameters
    ----------
    datastoreName : `str`
        Name of the `Datastore` as it should appear in `Registry` tables
        referencing it.

    Notes
    -----
    The current implementation just uses a Python set to remember the dataset
    IDs associated with the datastore. This will probably need to be converted
    to use in-database temporary tables instead in the future to support
    "in-datastore" constraints in `Registry.queryDatasets`.
    """
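
    # A minimal usage sketch of the lifecycle this bridge supports. The refs
    # and the datastore name below are illustrative assumptions, not values
    # taken from a real Registry:
    #
    #     bridge = EphemeralDatastoreRegistryBridge("inMemoryDatastore")
    #     bridge.insert(refs)            # record the datasets as stored
    #     assert all(ref in bridge for ref in refs)
    #     bridge.moveToTrash(refs)       # mark them for deletion
    #     with bridge.emptyTrash() as (matches, _):
    #         for ref, _ in matches:
    #             ...                    # datastore removes its artifacts here
    #     # On exit the trashed IDs are dropped from the bridge's bookkeeping.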

    def __init__(self, datastoreName: str):
        super().__init__(datastoreName)
        # Dataset IDs currently recorded as stored in this datastore.
        self._datasetIds: Set[DatasetId] = set()
        # Dataset IDs that have been moved to the trash but not yet emptied.
        self._trashedIds: Set[DatasetId] = set()

    def insert(self, refs: Iterable[DatasetIdRef]) -> None:
        # Docstring inherited from DatastoreRegistryBridge
        self._datasetIds.update(ref.getCheckedId() for ref in refs)

    def forget(self, refs: Iterable[DatasetIdRef]) -> None:
        # Docstring inherited from DatastoreRegistryBridge
        self._datasetIds.difference_update(ref.id for ref in refs)

    def moveToTrash(self, refs: Iterable[DatasetIdRef]) -> None:
        # Docstring inherited from DatastoreRegistryBridge
        self._trashedIds.update(ref.getCheckedId() for ref in refs)
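
    # Note that ``forget`` drops IDs without any artifact cleanup, while
    # ``moveToTrash`` only marks IDs for later deletion; the artifacts
    # themselves are removed by the datastore when it drains ``emptyTrash``.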

    def check(self, refs: Iterable[DatasetIdRef]) -> Iterable[DatasetIdRef]:
        # Docstring inherited from DatastoreRegistryBridge
        yield from (ref for ref in refs if ref in self)

    def __contains__(self, ref: DatasetIdRef) -> bool:
        dataset_id = ref.getCheckedId()
        return dataset_id in self._datasetIds and dataset_id not in self._trashedIds
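
    # Containment reflects both sets, so an illustrative sequence (with a
    # hypothetical resolved ``ref``) behaves like this:
    #
    #     bridge.insert([ref])
    #     assert ref in bridge
    #     bridge.moveToTrash([ref])
    #     assert ref not in bridge     # trashed, even though not yet emptied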

    @contextmanager
    def emptyTrash(
        self,
        records_table: Optional[OpaqueTableStorage] = None,
        record_class: Optional[Type[StoredDatastoreItemInfo]] = None,
        record_column: Optional[str] = None,
    ) -> Iterator[
        Tuple[Iterable[Tuple[DatasetIdRef, Optional[StoredDatastoreItemInfo]]], Optional[Set[str]]]
    ]:
        # Docstring inherited from DatastoreRegistryBridge
        # record_column is accepted for interface compatibility but is not
        # needed by this implementation.
        matches: Iterable[Tuple[FakeDatasetRef, Optional[StoredDatastoreItemInfo]]] = ()
        if isinstance(records_table, OpaqueTableStorage):
            if record_class is None:
                raise ValueError("Record class must be provided if records table is given.")
            matches = (
                (FakeDatasetRef(id), record_class.from_record(record))
                for id in self._trashedIds
                for record in records_table.fetch(dataset_id=id)
            )
        else:
            matches = ((FakeDatasetRef(id), None) for id in self._trashedIds)

        # Indicate to the caller that we do not know about artifacts that
        # should be retained.
        yield matches, None

        if isinstance(records_table, OpaqueTableStorage):
            # Remove the matching rows from the records table.
            records_table.delete(["dataset_id"], *[{"dataset_id": id} for id in self._trashedIds])

        # Empty the trash, removing the trashed IDs from the stored set as well.
        self._datasetIds.difference_update(self._trashedIds)
        self._trashedIds = set()
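
    # When the datastore keeps per-dataset records in an opaque table, the
    # expected pattern (sketched with hypothetical names; ``StoredFileInfo``
    # stands in for whatever ``StoredDatastoreItemInfo`` subclass the datastore
    # uses) is to pass that table in, so matching rows are handed back for
    # artifact cleanup and then deleted when the context exits:
    #
    #     with bridge.emptyTrash(
    #         records_table=table, record_class=StoredFileInfo
    #     ) as (matches, _):
    #         for ref, info in matches:
    #             ...  # delete the artifact described by ``info``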