Coverage for python/lsst/daf/butler/registry/dimensions/caching.py : 96%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
21from __future__ import annotations
23__all__ = ["CachingDimensionRecordStorage"]
25from typing import Any, Dict, Iterable, Mapping, Optional, Set
27import sqlalchemy
29from lsst.utils import doImport
31from ...core import (
32 DatabaseDimensionElement,
33 DataCoordinate,
34 DataCoordinateIterable,
35 DataCoordinateSet,
36 DimensionElement,
37 DimensionRecord,
38 GovernorDimension,
39 NamedKeyDict,
40 NamedKeyMapping,
41 TimespanDatabaseRepresentation,
42)
43from ..interfaces import (
44 Database,
45 DatabaseDimensionRecordStorage,
46 GovernorDimensionRecordStorage,
47 StaticTablesContext,
48)
49from ..queries import QueryBuilder
class CachingDimensionRecordStorage(DatabaseDimensionRecordStorage):
    """A record storage implementation that adds caching to some other nested
    storage implementation.

    Parameters
    ----------
    nested : `DatabaseDimensionRecordStorage`
        The other storage to cache fetches from and to delegate all other
        operations to.
    """

    def __init__(self, nested: DatabaseDimensionRecordStorage):
        self._nested = nested
        # Maps data ID -> record; an entry whose value is `None` records the
        # fact that the nested storage has no record for that data ID.
        self._cache: Dict[DataCoordinate, Optional[DimensionRecord]] = {}

    @classmethod
    def initialize(
        cls,
        db: Database,
        element: DatabaseDimensionElement, *,
        context: Optional[StaticTablesContext] = None,
        config: Mapping[str, Any],
        governors: NamedKeyMapping[GovernorDimension, GovernorDimensionRecordStorage],
    ) -> DatabaseDimensionRecordStorage:
        # Docstring inherited from DatabaseDimensionRecordStorage.
        nestedConfig = config["nested"]
        nestedClass = doImport(nestedConfig["cls"])
        nested = nestedClass.initialize(
            db, element, context=context, config=nestedConfig, governors=governors
        )
        return cls(nested)

    @property
    def element(self) -> DatabaseDimensionElement:
        # Docstring inherited from DimensionRecordStorage.element.
        return self._nested.element

    def clearCaches(self) -> None:
        # Docstring inherited from DimensionRecordStorage.clearCaches.
        self._cache.clear()
        self._nested.clearCaches()

    def join(
        self,
        builder: QueryBuilder, *,
        regions: Optional[NamedKeyDict[DimensionElement, sqlalchemy.sql.ColumnElement]] = None,
        timespans: Optional[NamedKeyDict[DimensionElement, TimespanDatabaseRepresentation]] = None,
    ) -> None:
        # Docstring inherited from DimensionRecordStorage.
        return self._nested.join(builder, regions=regions, timespans=timespans)

    def insert(self, *records: DimensionRecord) -> None:
        # Docstring inherited from DimensionRecordStorage.insert.
        self._nested.insert(*records)
        # Newly inserted records are known-good; cache them immediately.
        self._cache.update((record.dataId, record) for record in records)

    def sync(self, record: DimensionRecord) -> bool:
        # Docstring inherited from DimensionRecordStorage.sync.
        inserted = self._nested.sync(record)
        if inserted:
            self._cache[record.dataId] = record
        return inserted

    def fetch(self, dataIds: DataCoordinateIterable) -> Iterable[DimensionRecord]:
        # Docstring inherited from DimensionRecordStorage.fetch.
        missing: Set[DataCoordinate] = set()
        for dataId in dataIds:
            try:
                cached = self._cache[dataId]
            except KeyError:
                # Never seen this data ID before; ask the nested storage.
                missing.add(dataId)
            else:
                # A cached `None` means we already know no such record
                # exists, so there is nothing to yield for it.
                if cached is not None:
                    yield cached
        if missing:
            toFetch = DataCoordinateSet(missing, graph=self.element.graph)
            for record in self._nested.fetch(toFetch):
                self._cache[record.dataId] = record
                yield record
            # Anything the nested storage did not return does not exist;
            # remember that so we never query for it again.
            for dataId in missing - self._cache.keys():
                self._cache[dataId] = None

    def digestTables(self) -> Iterable[sqlalchemy.schema.Table]:
        # Docstring inherited from DimensionRecordStorage.digestTables.
        return self._nested.digestTables()