Coverage for python / lsst / daf / butler / registry / _registry.py: 98%
118 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-26 08:48 +0000
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-26 08:48 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28from __future__ import annotations
30__all__ = ("CollectionArgType", "Registry")
32import contextlib
33import logging
34import re
35from abc import ABC, abstractmethod
36from collections.abc import Iterable, Iterator, Mapping, Sequence
37from types import EllipsisType
38from typing import TYPE_CHECKING, Any, TypeAlias
40from .._collection_type import CollectionType
41from .._dataset_association import DatasetAssociation
42from .._dataset_ref import DatasetId, DatasetIdGenEnum, DatasetRef
43from .._dataset_type import DatasetType
44from .._storage_class import StorageClassFactory
45from .._timespan import Timespan
46from ..dimensions import (
47 DataCoordinate,
48 DataId,
49 DimensionElement,
50 DimensionGroup,
51 DimensionRecord,
52 DimensionUniverse,
53)
54from ._collection_summary import CollectionSummary
55from ._defaults import RegistryDefaults
56from .queries import DataCoordinateQueryResults, DatasetQueryResults, DimensionRecordQueryResults
57from .wildcards import CollectionWildcard
59if TYPE_CHECKING:
60 from .interfaces import ObsCoreTableManager
62_LOG = logging.getLogger(__name__)
# Type alias for ``collections`` arguments: a single collection name or
# pattern, an iterable of them, ``...`` (meaning "all collections"), or an
# already-constructed `CollectionWildcard`.
CollectionArgType: TypeAlias = (
    str | re.Pattern | Iterable[str | re.Pattern] | EllipsisType | CollectionWildcard
)
70class Registry(ABC):
71 """Abstract Registry interface.
73 All subclasses should store `~lsst.daf.butler.registry.RegistryDefaults` in
74 a ``_defaults`` property. No other properties are assumed shared between
75 implementations.
77 See Also
78 --------
79 lsst.daf.butler.Butler
80 """
    @abstractmethod
    def isWriteable(self) -> bool:
        """Return `True` if this registry allows write operations, and `False`
        otherwise.

        Returns
        -------
        writeable : `bool`
            Whether write operations are allowed.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def dimensions(self) -> DimensionUniverse:
        """Definitions of all dimensions recognized by this `Registry`
        (`DimensionUniverse`).
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def defaults(self) -> RegistryDefaults:
        """Default collection search path and/or output `~CollectionType.RUN`
        collection (`~lsst.daf.butler.registry.RegistryDefaults`).

        This is an immutable struct whose components may not be set
        individually, but the entire struct can be set by assigning to this
        property.
        """
        raise NotImplementedError()
    @defaults.setter
    @abstractmethod
    def defaults(self, value: RegistryDefaults) -> None:
        # Abstract setter; replaces the entire defaults struct at once (see
        # the ``defaults`` property getter for semantics).
        raise NotImplementedError()
    @abstractmethod
    def refresh(self) -> None:
        """Refresh all in-memory state by querying the database.

        This may be necessary to enable querying for entities added by other
        registry instances after this one was constructed.
        """
        raise NotImplementedError()
    @abstractmethod
    def refresh_collection_summaries(self) -> None:
        """Refresh content of the collection summary tables in the database.

        This method can be run to clean up the contents of the summary tables
        after removing datasets from collections.
        """
        raise NotImplementedError()
    @abstractmethod
    def caching_context(self) -> contextlib.AbstractContextManager[None]:
        """Context manager that enables caching.

        Returns
        -------
        manager : `contextlib.AbstractContextManager`
            Context manager within which caching is enabled.
        """
        raise NotImplementedError()
    @contextlib.contextmanager
    @abstractmethod
    def transaction(self, *, savepoint: bool = False) -> Iterator[None]:
        """Return a context manager that represents a transaction.

        Parameters
        ----------
        savepoint : `bool`, optional
            Whether to create a SAVEPOINT or not.  Defaults to `False`.
        """
        raise NotImplementedError()
    def resetConnectionPool(self) -> None:
        """Reset connection pool for registry if relevant.

        This operation can be used to reset connections to servers when
        using registry with fork-based multiprocessing. This method should
        usually be called by the child process immediately
        after the fork.

        The base class implementation is a no-op.
        """
        pass
    @abstractmethod
    def registerCollection(
        self, name: str, type: CollectionType = CollectionType.TAGGED, doc: str | None = None
    ) -> bool:
        """Add a new collection if one with the given name does not exist.

        Parameters
        ----------
        name : `str`
            The name of the collection to create.
        type : `CollectionType`
            Enum value indicating the type of collection to create.
        doc : `str`, optional
            Documentation string for the collection.

        Returns
        -------
        registered : `bool`
            `True` if the collection was created by this call, `False` if it
            was already registered.

        Notes
        -----
        This method cannot be called within transactions, as it needs to be
        able to perform its own transaction to be concurrent.
        """
        raise NotImplementedError()
    @abstractmethod
    def getCollectionType(self, name: str) -> CollectionType:
        """Return an enumeration value indicating the type of the given
        collection.

        Parameters
        ----------
        name : `str`
            The name of the collection.

        Returns
        -------
        type : `CollectionType`
            Enum value indicating the type of this collection.

        Raises
        ------
        lsst.daf.butler.registry.MissingCollectionError
            Raised if no collection with the given name exists.
        """
        raise NotImplementedError()
    @abstractmethod
    def registerRun(self, name: str, doc: str | None = None) -> bool:
        """Add a new run if one with the given name does not exist.

        Parameters
        ----------
        name : `str`
            The name of the run to create.
        doc : `str`, optional
            Documentation string for the collection.

        Returns
        -------
        registered : `bool`
            Boolean indicating whether a new run was registered. `False`
            if it already existed.

        Notes
        -----
        This method cannot be called within transactions, as it needs to be
        able to perform its own transaction to be concurrent.
        """
        raise NotImplementedError()
    @abstractmethod
    def removeCollection(self, name: str) -> None:
        """Remove the given collection from the registry.

        Parameters
        ----------
        name : `str`
            The name of the collection to remove.

        Raises
        ------
        lsst.daf.butler.registry.MissingCollectionError
            Raised if no collection with the given name exists.
        sqlalchemy.exc.IntegrityError
            Raised if the database rows associated with the collection are
            still referenced by some other table, such as a dataset in a
            datastore (for `~CollectionType.RUN` collections only) or a
            `~CollectionType.CHAINED` collection of which this collection is
            a child.

        Notes
        -----
        If this is a `~CollectionType.RUN` collection, all datasets and quanta
        in it will be removed from the `Registry` database. This requires that
        those datasets be removed (or at least trashed) from any datastores
        that hold them first.

        A collection may not be deleted as long as it is referenced by a
        `~CollectionType.CHAINED` collection; the ``CHAINED`` collection must
        be deleted or redefined first.
        """
        raise NotImplementedError()
    @abstractmethod
    def getCollectionChain(self, parent: str) -> Sequence[str]:
        """Return the child collections in a `~CollectionType.CHAINED`
        collection.

        Parameters
        ----------
        parent : `str`
            Name of the chained collection. Must have already been added via
            a call to `Registry.registerCollection`.

        Returns
        -------
        children : `~collections.abc.Sequence` [ `str` ]
            An ordered sequence of collection names that are searched when the
            given chained collection is searched.

        Raises
        ------
        lsst.daf.butler.registry.MissingCollectionError
            Raised if ``parent`` does not exist in the `Registry`.
        lsst.daf.butler.registry.CollectionTypeError
            Raised if ``parent`` does not correspond to a
            `~CollectionType.CHAINED` collection.
        """
        raise NotImplementedError()
    @abstractmethod
    def setCollectionChain(self, parent: str, children: Any, *, flatten: bool = False) -> None:
        """Define or redefine a `~CollectionType.CHAINED` collection.

        Parameters
        ----------
        parent : `str`
            Name of the chained collection. Must have already been added via
            a call to `Registry.registerCollection`.
        children : collection expression
            An expression defining an ordered search of child collections,
            generally an iterable of `str`; see
            :ref:`daf_butler_collection_expressions` for more information.
        flatten : `bool`, optional
            If `True` (`False` is default), recursively flatten out any nested
            `~CollectionType.CHAINED` collections in ``children`` first.

        Raises
        ------
        lsst.daf.butler.registry.MissingCollectionError
            Raised when any of the given collections do not exist in the
            `Registry`.
        lsst.daf.butler.registry.CollectionTypeError
            Raised if ``parent`` does not correspond to a
            `~CollectionType.CHAINED` collection.
        ValueError
            Raised if the given collections contain a cycle.
        """
        raise NotImplementedError()
    @abstractmethod
    def getCollectionParentChains(self, collection: str) -> set[str]:
        """Return the CHAINED collections that directly contain the given one.

        Parameters
        ----------
        collection : `str`
            Name of the collection.

        Returns
        -------
        chains : `set` of `str`
            Set of `~CollectionType.CHAINED` collection names.
        """
        raise NotImplementedError()
    @abstractmethod
    def getCollectionDocumentation(self, collection: str) -> str | None:
        """Retrieve the documentation string for a collection.

        Parameters
        ----------
        collection : `str`
            Name of the collection.

        Returns
        -------
        docs : `str` or `None`
            Docstring for the collection with the given name, or `None` if
            the collection has no documentation.
        """
        raise NotImplementedError()
    @abstractmethod
    def setCollectionDocumentation(self, collection: str, doc: str | None) -> None:
        """Set the documentation string for a collection.

        Parameters
        ----------
        collection : `str`
            Name of the collection.
        doc : `str` or `None`
            Docstring for the collection with the given name; will replace any
            existing docstring. Passing `None` will remove any existing
            docstring.
        """
        raise NotImplementedError()
    @abstractmethod
    def getCollectionSummary(self, collection: str) -> CollectionSummary:
        """Return a summary for the given collection.

        Parameters
        ----------
        collection : `str`
            Name of the collection for which a summary is to be retrieved.

        Returns
        -------
        summary : `~lsst.daf.butler.registry.CollectionSummary`
            Summary of the dataset types and governor dimension values in
            this collection.
        """
        raise NotImplementedError()
    @abstractmethod
    def registerDatasetType(self, datasetType: DatasetType) -> bool:
        """Add a new `DatasetType` to the Registry.

        It is not an error to register the same `DatasetType` twice.

        Parameters
        ----------
        datasetType : `DatasetType`
            The `DatasetType` to be added.

        Returns
        -------
        inserted : `bool`
            `True` if ``datasetType`` was inserted, `False` if an identical
            existing `DatasetType` was found. Note that in either case the
            DatasetType is guaranteed to be defined in the Registry
            consistently with the given definition.

        Raises
        ------
        ValueError
            Raised if the dimensions or storage class are invalid.
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if this `DatasetType` is already registered with a different
            definition.

        Notes
        -----
        This method cannot be called within transactions, as it needs to be
        able to perform its own transaction to be concurrent.
        """
        raise NotImplementedError()
    @abstractmethod
    def removeDatasetType(self, name: str | tuple[str, ...]) -> None:
        """Remove the named `DatasetType` from the registry.

        .. warning::

            Registry implementations can cache the dataset type definitions.
            This means that deleting the dataset type definition may result in
            unexpected behavior from other butler processes that are active
            that have not seen the deletion.

        Parameters
        ----------
        name : `str` or `tuple` [`str`]
            Name of the dataset type to be removed, or a tuple of names to
            remove multiple dataset types. Wildcards are allowed.

        Returns
        -------
        None

        Raises
        ------
        lsst.daf.butler.registry.OrphanedRecordError
            Raised if an attempt is made to remove the dataset type definition
            when there are already datasets associated with it.

        Notes
        -----
        If the dataset type is not registered the method will return without
        action.
        """
        raise NotImplementedError()
    @abstractmethod
    def getDatasetType(self, name: str) -> DatasetType:
        """Get the `DatasetType`.

        Parameters
        ----------
        name : `str`
            Name of the type.

        Returns
        -------
        type : `DatasetType`
            The `DatasetType` associated with the given name.

        Raises
        ------
        lsst.daf.butler.registry.MissingDatasetTypeError
            Raised if the requested dataset type has not been registered.

        Notes
        -----
        This method handles component dataset types automatically, though most
        other registry operations do not.
        """
        raise NotImplementedError()
    @abstractmethod
    def supportsIdGenerationMode(self, mode: DatasetIdGenEnum) -> bool:
        """Test whether the given dataset ID generation mode is supported by
        `insertDatasets`.

        Parameters
        ----------
        mode : `DatasetIdGenEnum`
            Enum value for the mode to test.

        Returns
        -------
        supported : `bool`
            Whether the given mode is supported.
        """
        raise NotImplementedError()
    @abstractmethod
    def findDataset(
        self,
        datasetType: DatasetType | str,
        dataId: DataId | None = None,
        *,
        collections: CollectionArgType | None = None,
        timespan: Timespan | None = None,
        datastore_records: bool = False,
        **kwargs: Any,
    ) -> DatasetRef | None:
        """Find a dataset given its `DatasetType` and data ID.

        This can be used to obtain a `DatasetRef` that permits the dataset to
        be read from a `Datastore`. If the dataset is a component and can not
        be found using the provided dataset type, a dataset ref for the parent
        will be returned instead but with the correct dataset type.

        Parameters
        ----------
        datasetType : `DatasetType` or `str`
            A `DatasetType` or the name of one. If this is a `DatasetType`
            instance, its storage class will be respected and propagated to
            the output, even if it differs from the dataset type definition
            in the registry, as long as the storage classes are convertible.
        dataId : `dict` or `DataCoordinate`, optional
            A `dict`-like object containing the `Dimension` links that identify
            the dataset within a collection.
        collections : collection expression, optional
            An expression that fully or partially identifies the collections to
            search for the dataset; see
            :ref:`daf_butler_collection_expressions` for more information.
            Defaults to ``self.defaults.collections``.
        timespan : `Timespan`, optional
            A timespan that the validity range of the dataset must overlap.
            If not provided, any `~CollectionType.CALIBRATION` collections
            matched by the ``collections`` argument will not be searched.
        datastore_records : `bool`, optional
            Whether to attach datastore records to the `DatasetRef`.
        **kwargs
            Additional keyword arguments passed to
            `DataCoordinate.standardize` to convert ``dataId`` to a true
            `DataCoordinate` or augment an existing one.

        Returns
        -------
        ref : `DatasetRef` or `None`
            A reference to the dataset, or `None` if no matching Dataset
            was found.

        Raises
        ------
        lsst.daf.butler.registry.NoDefaultCollectionError
            Raised if ``collections`` is `None` and
            ``self.defaults.collections`` is `None`.
        LookupError
            Raised if one or more data ID keys are missing.
        lsst.daf.butler.registry.MissingDatasetTypeError
            Raised if the dataset type does not exist.
        lsst.daf.butler.registry.MissingCollectionError
            Raised if any of ``collections`` does not exist in the registry.

        Notes
        -----
        This method simply returns `None` and does not raise an exception even
        when the set of collections searched is intrinsically incompatible with
        the dataset type, e.g. if ``datasetType.isCalibration() is False``, but
        only `~CollectionType.CALIBRATION` collections are being searched.
        This may make it harder to debug some lookup failures, but the behavior
        is intentional; we consider it more important that failed searches are
        reported consistently, regardless of the reason, and that adding
        additional collections that do not contain a match to the search path
        never changes the behavior.

        This method handles component dataset types automatically, though most
        other registry operations do not.
        """
        raise NotImplementedError()
    @abstractmethod
    def insertDatasets(
        self,
        datasetType: DatasetType | str,
        dataIds: Iterable[DataId],
        run: str | None = None,
        expand: bool = True,
        idGenerationMode: DatasetIdGenEnum = DatasetIdGenEnum.UNIQUE,
    ) -> list[DatasetRef]:
        """Insert one or more datasets into the `Registry`.

        This always adds new datasets; to associate existing datasets with
        a new collection, use ``associate``.

        Parameters
        ----------
        datasetType : `DatasetType` or `str`
            A `DatasetType` or the name of one.
        dataIds : `~collections.abc.Iterable` of `dict` or `DataCoordinate`
            Dimension-based identifiers for the new datasets.
        run : `str`, optional
            The name of the run that produced the datasets. Defaults to
            ``self.defaults.run``.
        expand : `bool`, optional
            If `True` (default), expand data IDs as they are inserted. This is
            necessary in general to allow datastore to generate file templates,
            but it may be disabled if the caller can guarantee this is
            unnecessary.
        idGenerationMode : `DatasetIdGenEnum`, optional
            Specifies option for generating dataset IDs. By default unique IDs
            are generated for each inserted dataset.

        Returns
        -------
        refs : `list` of `DatasetRef`
            Resolved `DatasetRef` instances for all given data IDs (in the same
            order).

        Raises
        ------
        lsst.daf.butler.registry.DatasetTypeError
            Raised if ``datasetType`` is not known to registry.
        lsst.daf.butler.registry.CollectionTypeError
            Raised if ``run`` collection type is not `~CollectionType.RUN`.
        lsst.daf.butler.registry.NoDefaultCollectionError
            Raised if ``run`` is `None` and ``self.defaults.run`` is `None`.
        lsst.daf.butler.registry.ConflictingDefinitionError
            If a dataset with the same dataset type and data ID as one of those
            given already exists in ``run``.
        lsst.daf.butler.registry.MissingCollectionError
            Raised if ``run`` does not exist in the registry.
        """
        raise NotImplementedError()
    @abstractmethod
    def _importDatasets(
        self,
        datasets: Iterable[DatasetRef],
        expand: bool = True,
        assume_new: bool = False,
    ) -> list[DatasetRef]:
        """Import one or more datasets into the `Registry`.

        This differs from `insertDatasets` method in that this method accepts
        `DatasetRef` instances, which already have a dataset ID.

        Parameters
        ----------
        datasets : `~collections.abc.Iterable` of `DatasetRef`
            Datasets to be inserted. All `DatasetRef` instances must have
            identical ``run`` attributes. ``run``
            attribute can be `None` and defaults to ``self.defaults.run``.
            Datasets can specify ``id`` attribute which will be used for
            inserted datasets.
            Datasets can be of multiple dataset types, but all the dataset
            types must have the same set of dimensions.
        expand : `bool`, optional
            If `True` (default), expand data IDs as they are inserted. This is
            necessary in general, but it may be disabled if the caller can
            guarantee this is unnecessary.
        assume_new : `bool`, optional
            If `True`, assume datasets are new. If `False`, datasets that are
            identical to an existing one are ignored.

        Returns
        -------
        refs : `list` of `DatasetRef`
            `DatasetRef` instances for all given data IDs (in the same order).
            If any of ``datasets`` has an ID which already exists in the
            database then it will not be inserted or updated, but a
            `DatasetRef` will be returned for it in any case.

        Raises
        ------
        lsst.daf.butler.registry.NoDefaultCollectionError
            Raised if ``run`` is `None` and ``self.defaults.run`` is `None`.
        lsst.daf.butler.registry.DatasetTypeError
            Raised if a dataset type is not known to registry.
        lsst.daf.butler.registry.ConflictingDefinitionError
            If a dataset with the same dataset type and data ID as one of those
            given already exists in ``run``, or if ``assume_new=True`` and at
            least one dataset is not new.
        lsst.daf.butler.registry.MissingCollectionError
            Raised if ``run`` does not exist in the registry.

        Notes
        -----
        This method is considered middleware-internal.
        """
        raise NotImplementedError()
    @abstractmethod
    def getDataset(self, id: DatasetId) -> DatasetRef | None:
        """Retrieve a Dataset entry.

        Parameters
        ----------
        id : `DatasetId`
            The unique identifier for the dataset.

        Returns
        -------
        ref : `DatasetRef` or `None`
            A ref to the Dataset, or `None` if no matching Dataset
            was found.
        """
        raise NotImplementedError()
    @abstractmethod
    def _fetch_run_dataset_ids(self, run: str) -> list[DatasetId]:
        """Return the IDs of all datasets in the given ``RUN``
        collection.

        Parameters
        ----------
        run : `str`
            Name of the collection.

        Returns
        -------
        dataset_ids : `list` [`DatasetId`]
            List of dataset IDs.

        Notes
        -----
        This is a middleware-internal interface.
        """
        raise NotImplementedError()
    @abstractmethod
    def removeDatasets(self, refs: Iterable[DatasetRef]) -> None:
        """Remove datasets from the Registry.

        The datasets will be removed unconditionally from all collections, and
        any `Quantum` that consumed this dataset will instead be marked with
        having a NULL input. `Datastore` records will *not* be deleted; the
        caller is responsible for ensuring that the dataset has already been
        removed from all Datastores.

        Parameters
        ----------
        refs : `~collections.abc.Iterable` [`DatasetRef`]
            References to the datasets to be removed. Must include a valid
            ``id`` attribute, and should be considered invalidated upon return.

        Raises
        ------
        lsst.daf.butler.AmbiguousDatasetError
            Raised if any ``ref.id`` is `None`.
        lsst.daf.butler.registry.OrphanedRecordError
            Raised if any dataset is still present in any `Datastore`.
        """
        raise NotImplementedError()
    @abstractmethod
    def associate(self, collection: str, refs: Iterable[DatasetRef]) -> None:
        """Add existing datasets to a `~CollectionType.TAGGED` collection.

        If a DatasetRef with the same exact ID is already in a collection
        nothing is changed. If a `DatasetRef` with the same `DatasetType` and
        data ID but with different ID exists in the collection,
        `~lsst.daf.butler.registry.ConflictingDefinitionError` is raised.

        Parameters
        ----------
        collection : `str`
            Indicates the collection the datasets should be associated with.
        refs : `~collections.abc.Iterable` [ `DatasetRef` ]
            An iterable of resolved `DatasetRef` instances that already exist
            in this `Registry`.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            If a Dataset with the given `DatasetRef` already exists in the
            given collection.
        lsst.daf.butler.registry.MissingCollectionError
            Raised if ``collection`` does not exist in the registry.
        lsst.daf.butler.registry.CollectionTypeError
            Raised if adding new datasets to the given ``collection`` is not
            allowed.
        """
        raise NotImplementedError()
    @abstractmethod
    def disassociate(self, collection: str, refs: Iterable[DatasetRef]) -> None:
        """Remove existing datasets from a `~CollectionType.TAGGED` collection.

        ``collection`` and ``ref`` combinations that are not currently
        associated are silently ignored.

        Parameters
        ----------
        collection : `str`
            The collection the datasets should no longer be associated with.
        refs : `~collections.abc.Iterable` [ `DatasetRef` ]
            An iterable of resolved `DatasetRef` instances that already exist
            in this `Registry`.

        Raises
        ------
        lsst.daf.butler.AmbiguousDatasetError
            Raised if any of the given dataset references is unresolved.
        lsst.daf.butler.registry.MissingCollectionError
            Raised if ``collection`` does not exist in the registry.
        lsst.daf.butler.registry.CollectionTypeError
            Raised if removing datasets from the given ``collection`` is not
            allowed.
        """
        raise NotImplementedError()
    @abstractmethod
    def certify(self, collection: str, refs: Iterable[DatasetRef], timespan: Timespan) -> None:
        """Associate one or more datasets with a calibration collection and a
        validity range within it.

        Parameters
        ----------
        collection : `str`
            The name of an already-registered `~CollectionType.CALIBRATION`
            collection.
        refs : `~collections.abc.Iterable` [ `DatasetRef` ]
            Datasets to be associated.
        timespan : `Timespan`
            The validity range for these datasets within the collection.

        Raises
        ------
        lsst.daf.butler.AmbiguousDatasetError
            Raised if any of the given `DatasetRef` instances is unresolved.
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if the collection already contains a different dataset with
            the same `DatasetType` and data ID and an overlapping validity
            range.
        lsst.daf.butler.registry.CollectionTypeError
            Raised if ``collection`` is not a `~CollectionType.CALIBRATION`
            collection or if one or more datasets are of a dataset type for
            which `DatasetType.isCalibration` returns `False`.
        """
        raise NotImplementedError()
    @abstractmethod
    def decertify(
        self,
        collection: str,
        datasetType: str | DatasetType,
        timespan: Timespan,
        *,
        dataIds: Iterable[DataId] | None = None,
    ) -> None:
        """Remove or adjust datasets to clear a validity range within a
        calibration collection.

        Parameters
        ----------
        collection : `str`
            The name of an already-registered `~CollectionType.CALIBRATION`
            collection.
        datasetType : `str` or `DatasetType`
            Name or `DatasetType` instance for the datasets to be decertified.
        timespan : `Timespan`
            The validity range to remove datasets from within the collection.
            Datasets that overlap this range but are not contained by it will
            have their validity ranges adjusted to not overlap it, which may
            split a single dataset validity range into two.
        dataIds : `~collections.abc.Iterable` [`dict` or `DataCoordinate`], \
                optional
            Data IDs that should be decertified within the given validity
            range.  If `None`, all data IDs for ``datasetType`` will be
            decertified.

        Raises
        ------
        lsst.daf.butler.registry.CollectionTypeError
            Raised if ``collection`` is not a `~CollectionType.CALIBRATION`
            collection or if ``datasetType.isCalibration() is False``.
        """
        raise NotImplementedError()
    @abstractmethod
    def getDatasetLocations(self, ref: DatasetRef) -> Iterable[str]:
        """Retrieve datastore locations for a given dataset.

        Parameters
        ----------
        ref : `DatasetRef`
            A reference to the dataset for which to retrieve storage
            information.

        Returns
        -------
        datastores : `~collections.abc.Iterable` [ `str` ]
            All the matching datastores holding this dataset.

        Raises
        ------
        lsst.daf.butler.AmbiguousDatasetError
            Raised if ``ref.id`` is `None`.
        """
        raise NotImplementedError()
    @abstractmethod
    def expandDataId(
        self,
        dataId: DataId | None = None,
        *,
        dimensions: Iterable[str] | DimensionGroup | None = None,
        records: Mapping[str, DimensionRecord | None] | None = None,
        withDefaults: bool = True,
        **kwargs: Any,
    ) -> DataCoordinate:
        """Expand a dimension-based data ID to include additional information.

        Parameters
        ----------
        dataId : `DataCoordinate` or `dict`, optional
            Data ID to be expanded; augmented and overridden by ``kwargs``.
        dimensions : `~collections.abc.Iterable` [ `str` ] or \
                `DimensionGroup`, optional
            The dimensions to be identified by the new `DataCoordinate`.
            If not provided, will be inferred from the keys of ``dataId`` and
            ``**kwargs``.
        records : `~collections.abc.Mapping` [`str`, `DimensionRecord`], \
                optional
            Dimension record data to use before querying the database for that
            data, keyed by element name.
        withDefaults : `bool`, optional
            Utilize ``self.defaults.dataId`` to fill in missing governor
            dimension key-value pairs. Defaults to `True` (i.e. defaults are
            used).
        **kwargs
            Additional keywords are treated like additional key-value pairs for
            ``dataId``, extending and overriding.

        Returns
        -------
        expanded : `DataCoordinate`
            A data ID that includes full metadata for all of the dimensions it
            identifies, i.e. guarantees that ``expanded.hasRecords()`` and
            ``expanded.hasFull()`` both return `True`.

        Raises
        ------
        lsst.daf.butler.registry.DataIdError
            Raised when ``dataId`` or keyword arguments specify unknown
            dimensions or values, or when a resulting data ID contains
            contradictory key-value pairs, according to dimension
            relationships.

        Notes
        -----
        This method cannot be relied upon to reject invalid data ID values
        for dimensions that do not actually have any record columns. For
        efficiency reasons the records for these dimensions (which have only
        dimension key values that are given by the caller) may be constructed
        directly rather than obtained from the registry database.
        """
        raise NotImplementedError()
    @abstractmethod
    def insertDimensionData(
        self,
        element: DimensionElement | str,
        *data: Mapping[str, Any] | DimensionRecord,
        conform: bool = True,
        replace: bool = False,
        skip_existing: bool = False,
    ) -> None:
        """Insert one or more dimension records into the database.

        Parameters
        ----------
        element : `DimensionElement` or `str`
            The `DimensionElement` or name thereof that identifies the table
            records will be inserted into.
        *data : `dict` or `DimensionRecord`
            One or more records to insert.
        conform : `bool`, optional
            If `False` (`True` is default) perform no checking or conversions,
            and assume that ``element`` is a `DimensionElement` instance and
            ``data`` is one or more `DimensionRecord` instances of the
            appropriate subclass.
        replace : `bool`, optional
            If `True` (`False` is default), replace existing records in the
            database if there is a conflict.
        skip_existing : `bool`, optional
            If `True` (`False` is default), skip insertion if a record with
            the same primary key values already exists. Unlike
            `syncDimensionData`, this will not detect when the given record
            differs from what is in the database, and should not be used when
            this is a concern.
        """
        raise NotImplementedError()
    @abstractmethod
    def syncDimensionData(
        self,
        element: DimensionElement | str,
        row: Mapping[str, Any] | DimensionRecord,
        conform: bool = True,
        update: bool = False,
    ) -> bool | dict[str, Any]:
        """Synchronize the given dimension record with the database, inserting
        if it does not already exist and comparing values if it does.

        Parameters
        ----------
        element : `DimensionElement` or `str`
            The `DimensionElement` or name thereof that identifies the table
            records will be inserted into.
        row : `dict` or `DimensionRecord`
            The record to insert.
        conform : `bool`, optional
            If `False` (`True` is default) perform no checking or conversions,
            and assume that ``element`` is a `DimensionElement` instance and
            ``row`` is a `DimensionRecord` instance of the appropriate
            subclass.
        update : `bool`, optional
            If `True` (`False` is default), update the existing record in the
            database if there is a conflict.

        Returns
        -------
        inserted_or_updated : `bool` or `dict`
            `True` if a new row was inserted, `False` if no changes were
            needed, or a `dict` mapping updated column names to their old
            values if an update was performed (only possible if
            ``update=True``).

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if the record exists in the database (according to primary
            key lookup) but is inconsistent with the given one.
        """
        raise NotImplementedError()
1037 @abstractmethod
1038 def queryDatasetTypes(
1039 self,
1040 expression: Any = ...,
1041 *,
1042 missing: list[str] | None = None,
1043 ) -> Iterable[DatasetType]:
1044 """Iterate over the dataset types whose names match an expression.
1046 Parameters
1047 ----------
1048 expression : dataset type expression, optional
1049 An expression that fully or partially identifies the dataset types
1050 to return, such as a `str`, `re.Pattern`, or iterable thereof.
1051 ``...`` can be used to return all dataset types, and is the
1052 default. See :ref:`daf_butler_dataset_type_expressions` for more
1053 information.
1054 missing : `list` of `str`, optional
1055 String dataset type names that were explicitly given (i.e. not
1056 regular expression patterns) but not found will be appended to this
1057 list, if it is provided.
1059 Returns
1060 -------
1061 dataset_types : `~collections.abc.Iterable` [ `DatasetType`]
1062 An `~collections.abc.Iterable` of `DatasetType` instances whose
1063 names match ``expression``.
1065 Raises
1066 ------
1067 lsst.daf.butler.registry.DatasetTypeExpressionError
1068 Raised when ``expression`` is invalid.
1069 """
1070 raise NotImplementedError()
1072 @abstractmethod
1073 def queryCollections(
1074 self,
1075 expression: Any = ...,
1076 datasetType: DatasetType | None = None,
1077 collectionTypes: Iterable[CollectionType] | CollectionType = CollectionType.all(),
1078 flattenChains: bool = False,
1079 includeChains: bool | None = None,
1080 ) -> Sequence[str]:
1081 """Iterate over the collections whose names match an expression.
1083 Parameters
1084 ----------
1085 expression : collection expression, optional
1086 An expression that identifies the collections to return, such as
1087 a `str` (for full matches or partial matches via globs),
1088 `re.Pattern` (for partial matches), or iterable thereof. ``...``
1089 can be used to return all collections, and is the default.
1090 See :ref:`daf_butler_collection_expressions` for more information.
1091 datasetType : `DatasetType`, optional
1092 If provided, only yield collections that may contain datasets of
1093 this type. This is a conservative approximation in general; it may
1094 yield collections that do not have any such datasets.
1095 collectionTypes : `~collections.abc.Set` [`CollectionType`] or \
1096 `CollectionType`, optional
1097 If provided, only yield collections of these types.
1098 flattenChains : `bool`, optional
1099 If `True` (`False` is default), recursively yield the child
1100 collections of matching `~CollectionType.CHAINED` collections.
1101 includeChains : `bool`, optional
1102 If `True`, yield records for matching `~CollectionType.CHAINED`
1103 collections. Default is the opposite of ``flattenChains``: include
1104 either CHAINED collections or their children, but not both.
1106 Returns
1107 -------
1108 collections : `~collections.abc.Sequence` [ `str` ]
1109 The names of collections that match ``expression``.
1111 Raises
1112 ------
1113 lsst.daf.butler.registry.CollectionExpressionError
1114 Raised when ``expression`` is invalid.
1116 Notes
1117 -----
1118 The order in which collections are returned is unspecified, except that
1119 the children of a `~CollectionType.CHAINED` collection are guaranteed
1120 to be in the order in which they are searched. When multiple parent
1121 `~CollectionType.CHAINED` collections match the same criteria, the
1122 order in which the two lists appear is unspecified, and the lists of
1123 children may be incomplete if a child has multiple parents.
1124 """
1125 raise NotImplementedError()
1127 @abstractmethod
1128 def queryDatasets(
1129 self,
1130 datasetType: Any,
1131 *,
1132 collections: CollectionArgType | None = None,
1133 dimensions: Iterable[str] | None = None,
1134 dataId: DataId | None = None,
1135 where: str = "",
1136 findFirst: bool = False,
1137 bind: Mapping[str, Any] | None = None,
1138 check: bool = True,
1139 **kwargs: Any,
1140 ) -> DatasetQueryResults:
1141 """Query for and iterate over dataset references matching user-provided
1142 criteria.
1144 Parameters
1145 ----------
1146 datasetType : dataset type expression
1147 An expression that fully or partially identifies the dataset types
1148 to be queried. Allowed types include `DatasetType`, `str`,
1149 `re.Pattern`, and iterables thereof. The special value ``...`` can
1150 be used to query all dataset types. See
1151 :ref:`daf_butler_dataset_type_expressions` for more information.
1152 collections : collection expression, optional
1153 An expression that identifies the collections to search, such as a
1154 `str` (for full matches or partial matches via globs), `re.Pattern`
1155 (for partial matches), or iterable thereof. ``...`` can be used to
1156 search all collections (actually just all `~CollectionType.RUN`
1157 collections, because this will still find all datasets).
1158 If not provided, ``self.default.collections`` is used. See
1159 :ref:`daf_butler_collection_expressions` for more information.
1160 dimensions : `~collections.abc.Iterable` [ `str` ]
1161 Dimensions to include in the query (in addition to those used
1162 to identify the queried dataset type(s)), either to constrain
1163 the resulting datasets to those for which a matching dimension
1164 exists, or to relate the dataset type's dimensions to dimensions
1165 referenced by the ``dataId`` or ``where`` arguments.
1166 dataId : `dict` or `DataCoordinate`, optional
1167 A data ID whose key-value pairs are used as equality constraints
1168 in the query.
1169 where : `str`, optional
1170 A string expression similar to a SQL WHERE clause. May involve
1171 any column of a dimension table or (as a shortcut for the primary
1172 key column of a dimension table) dimension name. See
1173 :ref:`daf_butler_dimension_expressions` for more information.
1174 findFirst : `bool`, optional
1175 If `True` (`False` is default), for each result data ID, only
1176 yield one `DatasetRef` of each `DatasetType`, from the first
1177 collection in which a dataset of that dataset type appears
1178 (according to the order of ``collections`` passed in). If `True`,
1179 ``collections`` must not contain regular expressions and may not
1180 be ``...``.
1181 bind : `~collections.abc.Mapping`, optional
1182 Mapping containing literal values that should be injected into the
1183 ``where`` expression, keyed by the identifiers they replace.
1184 Values of collection type can be expanded in some cases; see
1185 :ref:`daf_butler_dimension_expressions_identifiers` for more
1186 information.
1187 check : `bool`, optional
1188 If `True` (default) check the query for consistency before
1189 executing it. This may reject some valid queries that resemble
1190 common mistakes (e.g. queries for visits without specifying an
1191 instrument).
1192 **kwargs
1193 Additional keyword arguments are forwarded to
1194 `DataCoordinate.standardize` when processing the ``dataId``
1195 argument (and may be used to provide a constraining data ID even
1196 when the ``dataId`` argument is `None`).
1198 Returns
1199 -------
1200 refs : `.queries.DatasetQueryResults`
1201 Dataset references matching the given query criteria. Nested data
1202 IDs are guaranteed to include values for all implied dimensions
1203 (i.e. `DataCoordinate.hasFull` will return `True`), but will not
1204 include dimension records (`DataCoordinate.hasRecords` will be
1205 `False`) unless `~.queries.DatasetQueryResults.expanded` is
1206 called on the result object (which returns a new one).
1208 Raises
1209 ------
1210 lsst.daf.butler.registry.DatasetTypeExpressionError
1211 Raised when ``datasetType`` expression is invalid.
1212 TypeError
1213 Raised when the arguments are incompatible, such as when a
1214 collection wildcard is passed when ``findFirst`` is `True`, or
1215 when ``collections`` is `None` and ``self.defaults.collections`` is
1216 also `None`.
1217 lsst.daf.butler.registry.DataIdError
1218 Raised when ``dataId`` or keyword arguments specify unknown
1219 dimensions or values, or when they contain inconsistent values.
1220 lsst.daf.butler.registry.UserExpressionError
1221 Raised when ``where`` expression is invalid.
1223 Notes
1224 -----
1225 When multiple dataset types are queried in a single call, the
1226 results of this operation are equivalent to querying for each dataset
1227 type separately in turn, and no information about the relationships
1228 between datasets of different types is included. In contexts where
1229 that kind of information is important, the recommended pattern is to
1230 use `queryDataIds` to first obtain data IDs (possibly with the
1231 desired dataset types and collections passed as constraints to the
1232 query), and then use multiple (generally much simpler) calls to
1233 `queryDatasets` with the returned data IDs passed as constraints.
1234 """
1235 raise NotImplementedError()
    @abstractmethod
    def queryDataIds(
        self,
        dimensions: DimensionGroup | Iterable[str] | str,
        *,
        dataId: DataId | None = None,
        datasets: Any = None,
        collections: CollectionArgType | None = None,
        where: str = "",
        bind: Mapping[str, Any] | None = None,
        check: bool = True,
        **kwargs: Any,
    ) -> DataCoordinateQueryResults:
        """Query for data IDs matching user-provided criteria.

        Parameters
        ----------
        dimensions : `DimensionGroup`, `str`, or \
                `~collections.abc.Iterable` [ `str` ]
            The dimensions of the data IDs to yield. Will be automatically
            expanded to a complete `DimensionGroup`.
        dataId : `dict` or `DataCoordinate`, optional
            A data ID whose key-value pairs are used as equality constraints
            in the query.
        datasets : dataset type expression, optional
            An expression that fully or partially identifies dataset types
            that should constrain the yielded data IDs. For example, including
            "raw" here would constrain the yielded ``instrument``,
            ``exposure``, ``detector``, and ``physical_filter`` values to only
            those for which at least one "raw" dataset exists in
            ``collections``. Allowed types include `DatasetType`, `str`,
            and iterables thereof. See
            :ref:`daf_butler_dataset_type_expressions` for more information.
        collections : collection expression, optional
            An expression that identifies the collections to search for
            datasets, such as a `str` (for full matches or partial matches
            via globs), `re.Pattern` (for partial matches), or iterable
            thereof. ``...`` can be used to search all collections (actually
            just all `~CollectionType.RUN` collections, because this will
            still find all datasets). If not provided,
            ``self.default.collections`` is used. Ignored unless ``datasets``
            is also passed. See :ref:`daf_butler_collection_expressions` for
            more information.
        where : `str`, optional
            A string expression similar to a SQL WHERE clause. May involve
            any column of a dimension table or (as a shortcut for the primary
            key column of a dimension table) dimension name. See
            :ref:`daf_butler_dimension_expressions` for more information.
        bind : `~collections.abc.Mapping`, optional
            Mapping containing literal values that should be injected into the
            ``where`` expression, keyed by the identifiers they replace.
            Values of collection type can be expanded in some cases; see
            :ref:`daf_butler_dimension_expressions_identifiers` for more
            information.
        check : `bool`, optional
            If `True` (default) check the query for consistency before
            executing it. This may reject some valid queries that resemble
            common mistakes (e.g. queries for visits without specifying an
            instrument).
        **kwargs
            Additional keyword arguments are forwarded to
            `DataCoordinate.standardize` when processing the ``dataId``
            argument (and may be used to provide a constraining data ID even
            when the ``dataId`` argument is `None`).

        Returns
        -------
        dataIds : `.queries.DataCoordinateQueryResults`
            Data IDs matching the given query parameters. These are guaranteed
            to identify all dimensions (`DataCoordinate.hasFull` returns
            `True`), but will not contain `DimensionRecord` objects
            (`DataCoordinate.hasRecords` returns `False`). Call
            `~.queries.DataCoordinateQueryResults.expanded` on the
            returned object to fetch those (and consider using
            `~.queries.DataCoordinateQueryResults.materialize` on the
            returned object first if the expected number of rows is very
            large). See documentation for those methods for additional
            information.

        Raises
        ------
        lsst.daf.butler.registry.NoDefaultCollectionError
            Raised if ``collections`` is `None` and
            ``self.defaults.collections`` is `None`.
        lsst.daf.butler.registry.CollectionExpressionError
            Raised when ``collections`` expression is invalid.
        lsst.daf.butler.registry.DataIdError
            Raised when ``dataId`` or keyword arguments specify unknown
            dimensions or values, or when they contain inconsistent values.
        lsst.daf.butler.registry.DatasetTypeExpressionError
            Raised when ``datasets`` expression is invalid.
        lsst.daf.butler.registry.UserExpressionError
            Raised when ``where`` expression is invalid.
        """
        raise NotImplementedError()
    @abstractmethod
    def queryDimensionRecords(
        self,
        element: DimensionElement | str,
        *,
        dataId: DataId | None = None,
        datasets: Any = None,
        collections: CollectionArgType | None = None,
        where: str = "",
        bind: Mapping[str, Any] | None = None,
        check: bool = True,
        **kwargs: Any,
    ) -> DimensionRecordQueryResults:
        """Query for dimension information matching user-provided criteria.

        Parameters
        ----------
        element : `DimensionElement` or `str`
            The dimension element to obtain records for.
        dataId : `dict` or `DataCoordinate`, optional
            A data ID whose key-value pairs are used as equality constraints
            in the query.
        datasets : dataset type expression, optional
            An expression that fully or partially identifies dataset types
            that should constrain the yielded records. See `queryDataIds` and
            :ref:`daf_butler_dataset_type_expressions` for more information.
        collections : collection expression, optional
            An expression that identifies the collections to search for
            datasets, such as a `str` (for full matches or partial matches
            via globs), `re.Pattern` (for partial matches), or iterable
            thereof. ``...`` can be used to search all collections (actually
            just all `~CollectionType.RUN` collections, because this will
            still find all datasets). If not provided,
            ``self.default.collections`` is used. Ignored unless ``datasets``
            is also passed. See :ref:`daf_butler_collection_expressions` for
            more information.
        where : `str`, optional
            A string expression similar to a SQL WHERE clause. See
            `queryDataIds` and :ref:`daf_butler_dimension_expressions` for more
            information.
        bind : `~collections.abc.Mapping`, optional
            Mapping containing literal values that should be injected into the
            ``where`` expression, keyed by the identifiers they replace.
            Values of collection type can be expanded in some cases; see
            :ref:`daf_butler_dimension_expressions_identifiers` for more
            information.
        check : `bool`, optional
            If `True` (default) check the query for consistency before
            executing it. This may reject some valid queries that resemble
            common mistakes (e.g. queries for visits without specifying an
            instrument).
        **kwargs
            Additional keyword arguments are forwarded to
            `DataCoordinate.standardize` when processing the ``dataId``
            argument (and may be used to provide a constraining data ID even
            when the ``dataId`` argument is `None`).

        Returns
        -------
        records : `.queries.DimensionRecordQueryResults`
            Dimension records matching the given query parameters.

        Raises
        ------
        lsst.daf.butler.registry.NoDefaultCollectionError
            Raised if ``collections`` is `None` and
            ``self.defaults.collections`` is `None`.
        lsst.daf.butler.registry.CollectionExpressionError
            Raised when ``collections`` expression is invalid.
        lsst.daf.butler.registry.DataIdError
            Raised when ``dataId`` or keyword arguments specify unknown
            dimensions or values, or when they contain inconsistent values.
        lsst.daf.butler.registry.DatasetTypeExpressionError
            Raised when ``datasets`` expression is invalid.
        lsst.daf.butler.registry.UserExpressionError
            Raised when ``where`` expression is invalid.
        """
        raise NotImplementedError()
1412 @abstractmethod
1413 def queryDatasetAssociations(
1414 self,
1415 datasetType: str | DatasetType,
1416 collections: CollectionArgType | None = ...,
1417 *,
1418 collectionTypes: Iterable[CollectionType] = CollectionType.all(),
1419 flattenChains: bool = False,
1420 ) -> Iterator[DatasetAssociation]:
1421 """Iterate over dataset-collection combinations where the dataset is in
1422 the collection.
1424 This method is a temporary placeholder for better support for
1425 association results in `queryDatasets`. It will probably be
1426 removed in the future, and should be avoided in production code
1427 whenever possible.
1429 Parameters
1430 ----------
1431 datasetType : `DatasetType` or `str`
1432 A dataset type object or the name of one.
1433 collections : collection expression, optional
1434 An expression that identifies the collections to search for
1435 datasets, such as a `str` (for full matches or partial matches
1436 via globs), `re.Pattern` (for partial matches), or iterable
1437 thereof. ``...`` can be used to search all collections (actually
1438 just all `~CollectionType.RUN` collections, because this will still
1439 find all datasets). If not provided, ``self.default.collections``
1440 is used. See :ref:`daf_butler_collection_expressions` for more
1441 information.
1442 collectionTypes : `~collections.abc.Set` [ `CollectionType` ], optional
1443 If provided, only yield associations from collections of these
1444 types.
1445 flattenChains : `bool`, optional
1446 This parameter has no effect and is ignored.
1448 Yields
1449 ------
1450 association : `.DatasetAssociation`
1451 Object representing the relationship between a single dataset and
1452 a single collection.
1454 Raises
1455 ------
1456 lsst.daf.butler.registry.NoDefaultCollectionError
1457 Raised if ``collections`` is `None` and
1458 ``self.defaults.collections`` is `None`.
1459 lsst.daf.butler.registry.CollectionExpressionError
1460 Raised when ``collections`` expression is invalid.
1461 """
1462 raise NotImplementedError()
1464 @property
1465 def obsCoreTableManager(self) -> ObsCoreTableManager | None:
1466 """The ObsCore manager instance for this registry
1467 (`~.interfaces.ObsCoreTableManager`
1468 or `None`).
1470 ObsCore manager may not be implemented for all registry backend, or
1471 may not be enabled for many repositories.
1472 """
1473 return None
1475 @property
1476 def storageClasses(self) -> StorageClassFactory:
1477 """All storage classes known to the registry
1478 (`StorageClassFactory`).
1479 """
1480 raise NotImplementedError()