Coverage report for python/lsst/daf/butler/registry/dimensions/caching.py: 93% line coverage.
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ["CachingDimensionRecordStorage"]

from typing import Any, Dict, Iterable, Mapping, Optional, Set, Union

import sqlalchemy
from lsst.utils import doImportType

from ...core import (
    DatabaseDimensionElement,
    DataCoordinate,
    DataCoordinateIterable,
    DataCoordinateSet,
    DimensionElement,
    DimensionRecord,
    GovernorDimension,
    NamedKeyDict,
    NamedKeyMapping,
    SpatialRegionDatabaseRepresentation,
    TimespanDatabaseRepresentation,
)
from ..interfaces import (
    Database,
    DatabaseDimensionRecordStorage,
    GovernorDimensionRecordStorage,
    StaticTablesContext,
)
from ..queries import QueryBuilder
class CachingDimensionRecordStorage(DatabaseDimensionRecordStorage):
    """A record storage implementation that adds caching to some other nested
    storage implementation.

    Parameters
    ----------
    nested : `DatabaseDimensionRecordStorage`
        The other storage to cache fetches from and to delegate all other
        operations to.
    """

    def __init__(self, nested: DatabaseDimensionRecordStorage):
        self._nested = nested
        # Cache of fetched records, keyed by data ID.  A value of `None`
        # means "known not to exist" (a negative-result cache entry), which
        # is distinct from the key being absent ("never looked up").
        self._cache: Dict[DataCoordinate, Optional[DimensionRecord]] = {}

    @classmethod
    def initialize(
        cls,
        db: Database,
        element: DatabaseDimensionElement, *,
        context: Optional[StaticTablesContext] = None,
        config: Mapping[str, Any],
        governors: NamedKeyMapping[GovernorDimension, GovernorDimensionRecordStorage],
    ) -> DatabaseDimensionRecordStorage:
        # Docstring inherited from DatabaseDimensionRecordStorage.
        # The nested storage's configuration lives under the "nested" key,
        # with its class name under "cls".
        config = config["nested"]
        NestedClass = doImportType(config["cls"])
        if not hasattr(NestedClass, "initialize"):
            raise TypeError(f"Nested class {config['cls']} does not have an initialize() method.")
        nested = NestedClass.initialize(db, element, context=context, config=config, governors=governors)
        return cls(nested)

    @property
    def element(self) -> DatabaseDimensionElement:
        # Docstring inherited from DimensionRecordStorage.element.
        return self._nested.element

    def clearCaches(self) -> None:
        # Docstring inherited from DimensionRecordStorage.clearCaches.
        self._cache.clear()
        self._nested.clearCaches()

    def join(
        self,
        builder: QueryBuilder, *,
        regions: Optional[NamedKeyDict[DimensionElement, SpatialRegionDatabaseRepresentation]] = None,
        timespans: Optional[NamedKeyDict[DimensionElement, TimespanDatabaseRepresentation]] = None,
    ) -> None:
        # Docstring inherited from DimensionRecordStorage.
        # Query construction is delegated entirely; caching only affects
        # record fetches.
        return self._nested.join(builder, regions=regions, timespans=timespans)

    def insert(self, *records: DimensionRecord, replace: bool = False) -> None:
        # Docstring inherited from DimensionRecordStorage.insert.
        # Insert into nested storage first so a failure there leaves the
        # cache untouched.
        self._nested.insert(*records, replace=replace)
        for record in records:
            self._cache[record.dataId] = record

    def sync(self, record: DimensionRecord, update: bool = False) -> Union[bool, Dict[str, Any]]:
        # Docstring inherited from DimensionRecordStorage.sync.
        inserted_or_updated = self._nested.sync(record, update=update)
        if inserted_or_updated:
            self._cache[record.dataId] = record
        return inserted_or_updated

    def fetch(self, dataIds: DataCoordinateIterable) -> Iterable[DimensionRecord]:
        # Docstring inherited from DimensionRecordStorage.fetch.
        missing: Set[DataCoordinate] = set()
        for dataId in dataIds:
            # Use ... as sentinel value so we can also cache None == "no such
            # record exists".
            record = self._cache.get(dataId, ...)
            if record is ...:
                missing.add(dataId)
            elif record is not None:
                # Unclear why MyPy can't tell that this isn't ..., but it
                # thinks it's still a possibility.
                yield record  # type: ignore
        if missing:
            toFetch = DataCoordinateSet(missing, graph=self.element.graph)
            for record in self._nested.fetch(toFetch):
                self._cache[record.dataId] = record
                yield record
            # Any data IDs still not in the cache were not returned by the
            # nested fetch; remember them as nonexistent so we don't query
            # for them again.
            missing -= self._cache.keys()
            for dataId in missing:
                self._cache[dataId] = None

    def digestTables(self) -> Iterable[sqlalchemy.schema.Table]:
        # Docstring inherited from DimensionRecordStorage.digestTables.
        return self._nested.digestTables()