Coverage for python/lsst/daf/butler/registry/interfaces/_obscore.py: 98%
30 statements
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Interfaces for classes that manage obscore table(s) in a `Registry`."""
from __future__ import annotations

__all__ = ["ObsCoreTableManager"]

from abc import abstractmethod
from collections.abc import Iterable, Iterator, Mapping
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any

import sqlalchemy

from ._versioning import VersionedExtension, VersionTuple

if TYPE_CHECKING:
    from lsst.sphgeom import Region

    from ..._dataset_ref import DatasetRef
    from ...dimensions import DimensionUniverse
    from ..queries import SqlQueryContext
    from ._collections import CollectionRecord
    from ._database import Database, StaticTablesContext
    from ._datasets import DatasetRecordStorageManager
    from ._dimensions import DimensionRecordStorageManager


class ObsCoreTableManager(VersionedExtension):
    """An interface for populating ObsCore table(s).

    Parameters
    ----------
    registry_schema_version : `VersionTuple` or `None`, optional
        Version of registry schema.
    """

    def __init__(self, *, registry_schema_version: VersionTuple | None = None):
        super().__init__(registry_schema_version=registry_schema_version)

    @abstractmethod
    def clone(
        self,
        *,
        db: Database,
        dimensions: DimensionRecordStorageManager,
    ) -> ObsCoreTableManager:
        """Make an independent copy of this manager instance bound to new
        instances of `Database` and other managers.

        Parameters
        ----------
        db : `Database`
            New `Database` object to use when instantiating the manager.
        dimensions : `DimensionRecordStorageManager`
            New `DimensionRecordStorageManager` object to use when
            instantiating the manager.

        Returns
        -------
        instance : `ObsCoreTableManager`
            New manager instance with the same configuration as this instance,
            but bound to a new `Database` object.
        """
        raise NotImplementedError()

    @classmethod
    @abstractmethod
    def initialize(
        cls,
        db: Database,
        context: StaticTablesContext,
        *,
        universe: DimensionUniverse,
        config: Mapping,
        datasets: type[DatasetRecordStorageManager],
        dimensions: DimensionRecordStorageManager,
        registry_schema_version: VersionTuple | None = None,
    ) -> ObsCoreTableManager:
        """Construct an instance of the manager.

        Parameters
        ----------
        db : `Database`
            Interface to the underlying database engine and namespace.
        context : `StaticTablesContext`
            Context object obtained from `Database.declareStaticTables`; used
            to declare any tables that should always be present in a layer
            implemented with this manager.
        universe : `DimensionUniverse`
            All dimensions known to the registry.
        config : `~collections.abc.Mapping` [ `str`, `Any` ]
            Configuration of the obscore manager.
        datasets : `type` [ `DatasetRecordStorageManager` ]
            Type of the dataset manager.
        dimensions : `DimensionRecordStorageManager`
            Manager for Registry dimensions.
        registry_schema_version : `VersionTuple` or `None`
            Schema version of this extension as defined in the registry.

        Returns
        -------
        manager : `ObsCoreTableManager`
            An instance of a concrete `ObsCoreTableManager` subclass.
        """
        raise NotImplementedError()

    @abstractmethod
    def config_json(self) -> str:
        """Dump configuration in JSON format.

        Returns
        -------
        json : `str`
            Configuration serialized in JSON format.
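
        Examples
        --------
        A minimal usage sketch; ``manager`` is assumed to be an instance of a
        concrete `ObsCoreTableManager` implementation obtained elsewhere:

        >>> import json
        >>> config = json.loads(manager.config_json())  # doctest: +SKIP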
143 """
144 raise NotImplementedError()

    @abstractmethod
    def add_datasets(self, refs: Iterable[DatasetRef], context: SqlQueryContext) -> int:
        """Possibly add datasets to the obscore table.

        This method should be called when new datasets are added to a RUN
        collection.

        Parameters
        ----------
        refs : `~collections.abc.Iterable` [ `DatasetRef` ]
            Dataset refs to add. Dataset refs have to be completely expanded.
            If a record with the same dataset ID is already in the obscore
            table, the dataset is ignored.
        context : `SqlQueryContext`
            Context used to execute queries for additional dimension metadata.

        Returns
        -------
        count : `int`
            Actual number of records inserted into the obscore table.

        Notes
        -----
        Dataset types and collection names are checked against the configured
        lists of dataset types and collections; non-matching datasets are
        ignored and not added to the obscore table.

        When the configuration parameter ``collection_type`` is not "RUN",
        this method should return immediately.

        Note that there is no matching method to remove datasets from the
        obscore table; removal is assumed to happen via a foreign key
        constraint to the dataset table with the "ON DELETE CASCADE" option.
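
        Examples
        --------
        A hypothetical sketch; ``manager`` stands for a concrete
        `ObsCoreTableManager` implementation, while ``expanded_refs`` and
        ``query_context`` are assumed to be fully expanded `DatasetRef`
        objects and a `SqlQueryContext` obtained elsewhere:

        >>> inserted = manager.add_datasets(expanded_refs, query_context)  # doctest: +SKIP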
179 """
180 raise NotImplementedError()

    @abstractmethod
    def associate(
        self, refs: Iterable[DatasetRef], collection: CollectionRecord, context: SqlQueryContext
    ) -> int:
        """Possibly add datasets to the obscore table.

        This method should be called when existing datasets are associated
        with a TAGGED collection.

        Parameters
        ----------
        refs : `~collections.abc.Iterable` [ `DatasetRef` ]
            Dataset refs to add. Dataset refs have to be completely expanded.
            If a record with the same dataset ID is already in the obscore
            table, the dataset is ignored.
        collection : `CollectionRecord`
            Collection record for a TAGGED collection.
        context : `SqlQueryContext`
            Context used to execute queries for additional dimension metadata.

        Returns
        -------
        count : `int`
            Actual number of records inserted into the obscore table.

        Notes
        -----
        Dataset types and collection names are checked against the configured
        lists of dataset types and collections; non-matching datasets are
        ignored and not added to the obscore table.

        When the configuration parameter ``collection_type`` is not "TAGGED",
        this method should return immediately.
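
        Examples
        --------
        A hypothetical sketch of the TAGGED-collection lifecycle; ``manager``,
        ``expanded_refs``, ``tagged_record``, and ``query_context`` are
        assumed to be a concrete `ObsCoreTableManager`, expanded `DatasetRef`
        objects, the `CollectionRecord` of a TAGGED collection, and a
        `SqlQueryContext`, respectively:

        >>> added = manager.associate(expanded_refs, tagged_record, query_context)  # doctest: +SKIP
        >>> removed = manager.disassociate(expanded_refs, tagged_record)  # doctest: +SKIP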
215 """
216 raise NotImplementedError()

    @abstractmethod
    def disassociate(self, refs: Iterable[DatasetRef], collection: CollectionRecord) -> int:
        """Possibly remove datasets from the obscore table.

        This method should be called when datasets are disassociated from a
        TAGGED collection.

        Parameters
        ----------
        refs : `~collections.abc.Iterable` [ `DatasetRef` ]
            Dataset refs to remove. Dataset refs have to be resolved.
        collection : `CollectionRecord`
            Collection record for a TAGGED collection.

        Returns
        -------
        count : `int`
            Actual number of records removed from the obscore table.

        Notes
        -----
        Dataset types and collection names are checked against the configured
        lists of dataset types and collections; non-matching datasets are
        ignored and not removed from the obscore table.

        When the configuration parameter ``collection_type`` is not "TAGGED",
        this method should return immediately.
        """
        raise NotImplementedError()

    @abstractmethod
    def update_exposure_regions(self, instrument: str, region_data: Iterable[tuple[int, int, Region]]) -> int:
        """Update existing exposure records with spatial region data.

        Parameters
        ----------
        instrument : `str`
            Instrument name.
        region_data : `~collections.abc.Iterable` [ `tuple` [ `int`, `int`, \
                `~lsst.sphgeom.Region` ] ]
            Sequence of tuples; each tuple contains three values: exposure ID,
            detector ID, and the corresponding region.

        Returns
        -------
        count : `int`
            Actual number of records updated.

        Notes
        -----
        This method is needed to update obscore records for raw exposures that
        are ingested before their corresponding visits are defined. Exposure
        records added when the visit is already defined will get their regions
        from their matching visits automatically.
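
        Examples
        --------
        A hypothetical sketch; ``manager`` stands for a concrete
        `ObsCoreTableManager` implementation and ``region`` for an
        `lsst.sphgeom.Region` instance obtained elsewhere (e.g. from a visit
        record). The instrument name and exposure/detector IDs are made up:

        >>> region_data = [(2022030100123, 42, region)]  # doctest: +SKIP
        >>> n_updated = manager.update_exposure_regions("LSSTCam", region_data)  # doctest: +SKIP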
272 """
273 raise NotImplementedError()

    @abstractmethod
    @contextmanager
    def query(
        self, columns: Iterable[str | sqlalchemy.sql.expression.ColumnElement] | None = None, /, **kwargs: Any
    ) -> Iterator[sqlalchemy.engine.CursorResult]:
        """Run a SELECT query against the obscore table and return result rows.

        Parameters
        ----------
        columns : `~collections.abc.Iterable` [ `str` or \
                `sqlalchemy.sql.expression.ColumnElement` ], optional
            Columns to return from the query. The sequence can include column
            names or any other column elements (e.g. the
            `sqlalchemy.sql.functions.count` function).
        **kwargs
            Restrictions on the values of individual obscore columns. Each key
            is a column name and each value is the required value of that
            column. Multiple restrictions are ANDed together.

        Returns
        -------
        result_context : `sqlalchemy.engine.CursorResult`
            Context manager that returns the query result object when entered.
            These results are invalidated when the context is exited.
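
        Examples
        --------
        A hypothetical sketch; ``manager`` stands for a concrete
        `ObsCoreTableManager` implementation, and the column names and values
        are assumptions about the configured obscore schema, not part of this
        interface:

        >>> import sqlalchemy
        >>> with manager.query(  # doctest: +SKIP
        ...     ["obs_id", "s_ra", "s_dec"], dataproduct_type="image"
        ... ) as result:
        ...     rows = result.fetchall()
        >>> with manager.query([sqlalchemy.sql.functions.count()]) as result:  # doctest: +SKIP
        ...     (n_records,) = result.one()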
298 """
299 raise NotImplementedError()