Coverage for python/lsst/daf/butler/registry/wildcards.py : 18%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ["CategorizedWildcard", "CollectionQuery", "CollectionSearch", "DatasetTypeRestriction"]

from dataclasses import dataclass
import itertools
import operator
import re
from typing import (
    Any,
    Callable,
    FrozenSet,
    Iterator,
    List,
    Optional,
    Set,
    Tuple,
    TYPE_CHECKING,
    Union,
)

import sqlalchemy

from ..core import DatasetType
from ..core.utils import iterable
from ._collectionType import CollectionType

if TYPE_CHECKING:
    from .interfaces import CollectionManager, CollectionRecord


@dataclass
class CategorizedWildcard:
    """The results of preprocessing a wildcard expression to separate match
    patterns from strings.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).
    """

    @classmethod
    def fromExpression(cls, expression: Any, *,
                       allowAny: bool = True,
                       allowPatterns: bool = True,
                       coerceUnrecognized: Optional[Callable[[Any], Union[Tuple[str, Any], str]]] = None,
                       coerceItemValue: Optional[Callable[[Any], Any]] = None,
                       defaultItemValue: Optional[Any] = None,
                       ) -> Union[CategorizedWildcard, type(...)]:
        """Categorize a wildcard expression.

        Parameters
        ----------
        expression
            The expression to categorize. May be any of:
            - `str`;
            - `re.Pattern` (only if ``allowPatterns`` is `True`);
            - objects recognized by ``coerceUnrecognized`` (if provided);
            - two-element tuples of (`str`, value) where value is recognized
              by ``coerceItemValue`` (if provided);
            - a non-`str`, non-mapping iterable containing any of the above;
            - the special value `...` (only if ``allowAny`` is `True`), which
              matches anything;
            - a mapping from `str` to a value recognized by
              ``coerceItemValue`` (if provided);
            - a `CategorizedWildcard` instance (passed through unchanged if
              it meets the requirements specified by keyword arguments).
        allowAny : `bool`, optional
            If `False` (`True` is default) raise `TypeError` if `...` is
            encountered.
        allowPatterns : `bool`, optional
            If `False` (`True` is default) raise `TypeError` if a `re.Pattern`
            is encountered, or if ``expression`` is a `CategorizedWildcard`
            with `patterns` not empty.
        coerceUnrecognized : `Callable`, optional
            A callback that takes a single argument of arbitrary type and
            returns either a `str` - appended to `strings` - or a `tuple` of
            (`str`, `Any`) to be appended to `items`. This will be called on
            objects of unrecognized type, with the return value added to
            `strings`. Exceptions will be reraised as `TypeError` (and
            chained).
        coerceItemValue : `Callable`, optional
            If provided, ``expression`` may be a mapping from `str` to any
            type that can be passed to this function; the result of that call
            will be stored instead as the value in ``self.items``.
        defaultItemValue : `Any`, optional
            If provided, combine this value with any string values encountered
            (including any returned by ``coerceUnrecognized``) to form a
            `tuple` and add it to `items`, guaranteeing that `strings` will be
            empty. Patterns are never added to `items`.

        Returns
        -------
        categorized : `CategorizedWildcard` or ``...``
            The struct describing the wildcard. ``...`` is passed through
            unchanged.

        Raises
        ------
        TypeError
            Raised if an unsupported type is found in the expression.
        """
        assert expression is not None
        # See if we were given ...; just return that if we were.
        if expression is ...:
            if not allowAny:
                raise TypeError("This expression may not be unconstrained.")
            return ...
        if isinstance(expression, cls):
            # This is already a CategorizedWildcard. Make sure it meets the
            # reqs. implied by the kwargs we got.
            if not allowPatterns and expression.patterns:
                raise TypeError(f"Regular expression(s) {expression.patterns} "
                                f"are not allowed in this context.")
            if defaultItemValue is not None and expression.strings:
                if expression.items:
                    raise TypeError("Incompatible preprocessed expression: an ordered sequence of str is "
                                    "needed, but the original order was lost in the preprocessing.")
                return cls(strings=[], patterns=expression.patterns,
                           items=[(k, defaultItemValue) for k in expression.strings])
            elif defaultItemValue is None and expression.items:
                if expression.strings:
                    raise TypeError("Incompatible preprocessed expression: an ordered sequence of items is "
                                    "needed, but the original order was lost in the preprocessing.")
                return cls(strings=[k for k, _ in expression.items], patterns=expression.patterns, items=[])
            else:
                # Original expression was created with keyword arguments that
                # were at least as restrictive as what we just got; pass it
                # through.
                return expression

        # If we get here, we know we'll be creating a new instance.
        # Initialize an empty one now.
        self = cls(strings=[], patterns=[], items=[])

        # If mappings are allowed, see if we were given a single mapping by
        # trying to get items.
        if coerceItemValue is not None:
            rawItems = None
            try:
                rawItems = expression.items()
            except AttributeError:
                pass
            if rawItems is not None:
                for k, v in rawItems:
                    try:
                        self.items.append((k, coerceItemValue(v)))
                    except Exception as err:
                        raise TypeError(f"Could not coerce mapping value '{v}' for key '{k}'.") from err
                return self

        # Not ..., a CategorizedWildcard instance, or a mapping. Just
        # process scalars or an iterable. We put the body of the loop inside
        # a local function so we can recurse after coercion.

        def process(element: Any, alreadyCoerced: bool = False):
            if isinstance(element, str):
                if defaultItemValue is not None:
                    self.items.append((element, defaultItemValue))
                else:
                    self.strings.append(element)
                return
            if allowPatterns and isinstance(element, re.Pattern):
                self.patterns.append(element)
                return
            if coerceItemValue is not None:
                try:
                    k, v = element
                except TypeError:
                    pass
                else:
                    if not alreadyCoerced:
                        if not isinstance(k, str):
                            raise TypeError(f"Item key '{k}' is not a string.")
                        try:
                            v = coerceItemValue(v)
                        except Exception as err:
                            raise TypeError(f"Could not coerce tuple item value '{v}' for key '{k}'."
                                            ) from err
                    self.items.append((k, v))
                    return
            if alreadyCoerced:
                raise TypeError(f"Object '{element}' returned by coercion function is still unrecognized.")
            if coerceUnrecognized is not None:
                try:
                    process(coerceUnrecognized(element), alreadyCoerced=True)
                except Exception as err:
                    raise TypeError(f"Could not coerce expression element '{element}'.") from err
            else:
                raise TypeError(f"Unsupported object in wildcard expression: '{element}'.")

        for element in iterable(expression):
            process(element)
        return self
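
    # A minimal usage sketch (hedged; the collection name and pattern below
    # are purely illustrative, not part of this module):
    #
    #     >>> import re
    #     >>> wc = CategorizedWildcard.fromExpression(["calib", re.compile(r"u/.+")])
    #     >>> wc.strings
    #     ['calib']
    #     >>> len(wc.patterns)
    #     1
    #     >>> CategorizedWildcard.fromExpression(..., allowAny=False)
    #     Traceback (most recent call last):
    #         ...
    #     TypeError: This expression may not be unconstrained.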

    def makeWhereExpression(self, column: sqlalchemy.sql.ColumnElement
                            ) -> Optional[sqlalchemy.sql.ColumnElement]:
        """Transform the wildcard into a SQLAlchemy boolean expression
        suitable for use in a WHERE clause.

        Parameters
        ----------
        column : `sqlalchemy.sql.ColumnElement`
            A string column in a table or query that should be compared to
            the wildcard expression.

        Returns
        -------
        where : `sqlalchemy.sql.ColumnElement` or `None`
            A boolean SQL expression that evaluates to true if and only if
            the value of ``column`` matches the wildcard. `None` is returned
            if both `strings` and `patterns` are empty, and hence no match is
            possible.
        """
        if self.items:
            raise NotImplementedError("Expressions that are processed into items cannot be transformed "
                                      "automatically into queries.")
        if self.patterns:
            raise NotImplementedError("Regular expression patterns are not yet supported here.")
        terms = []
        if len(self.strings) == 1:
            terms.append(column == self.strings[0])
        elif len(self.strings) > 1:
            terms.append(column.in_(self.strings))
        # TODO: append terms for regular expressions
        if not terms:
            return None
        return sqlalchemy.sql.or_(*terms)
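
    # A hedged sketch of using the result to filter a SQLAlchemy query; the
    # table and column here are illustrative stand-ins, not registry tables,
    # and the compiled SQL shape depends on the SQLAlchemy version:
    #
    #     >>> import sqlalchemy
    #     >>> collections = sqlalchemy.sql.table("collection", sqlalchemy.sql.column("name"))
    #     >>> wc = CategorizedWildcard.fromExpression(["calib", "refcats"])
    #     >>> where = wc.makeWhereExpression(collections.c.name)
    #     >>> str(where)  # roughly: "collection.name IN (:name_1, :name_2)"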

    strings: List[str]
    """Explicit string values found in the wildcard (`list` [ `str` ]).
    """

    patterns: List[re.Pattern]
    """Regular expression patterns found in the wildcard
    (`list` [ `re.Pattern` ]).
    """

    items: List[Tuple[str, Any]]
    """Two-item tuples that relate string values to other objects
    (`list` [ `tuple` [ `str`, `Any` ] ]).
    """


class DatasetTypeRestriction:
    """An immutable set-like object that represents a restriction on the
    dataset types to search for within a collection.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).

    Parameters
    ----------
    names : `frozenset` [ `str` ] or `...`
        The names of the dataset types included in the restriction, or `...`
        to permit a search for any dataset type.

    Notes
    -----
    This class does not inherit from `collections.abc.Set` (and does not
    implement the full set interface) because it is not always iterable and
    sometimes has no length (i.e. when ``names`` is ``...``).
    """
    def __init__(self, names: Union[FrozenSet[str], type(...)]):
        self.names = names

    __slots__ = ("names",)

    @classmethod
    def fromExpression(cls, expression: Any) -> DatasetTypeRestriction:
        """Process a general expression to construct a
        `DatasetTypeRestriction` instance.

        Parameters
        ----------
        expression
            May be:
            - a `DatasetType` instance;
            - a `str` dataset type name;
            - any non-mapping iterable containing either of the above;
            - the special value `...`;
            - another `DatasetTypeRestriction` instance (passed through
              unchanged).

        Returns
        -------
        restriction : `DatasetTypeRestriction`
            A `DatasetTypeRestriction` instance.
        """
        if isinstance(expression, cls):
            return expression
        wildcard = CategorizedWildcard.fromExpression(expression, allowPatterns=False,
                                                      coerceUnrecognized=lambda d: d.name)
        if wildcard is ...:
            return cls.any
        else:
            return cls(frozenset(wildcard.strings))
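
    # A minimal sketch of the accepted expression forms (the dataset type
    # names here are hypothetical):
    #
    #     >>> DatasetTypeRestriction.fromExpression("flat")
    #     DatasetTypeRestriction(frozenset({'flat'}))
    #     >>> DatasetTypeRestriction.fromExpression(["flat", "bias"]).names == frozenset({"flat", "bias"})
    #     True
    #     >>> DatasetTypeRestriction.fromExpression(...).names is ...
    #     True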

    def __contains__(self, datasetType: DatasetType) -> bool:
        return (self.names is ... or datasetType.name in self.names
                or (datasetType.isComponent()
                    and DatasetType.splitDatasetTypeName(datasetType.name)[0] in self.names))

    def __eq__(self, other: DatasetTypeRestriction) -> bool:
        return self.names == other.names

    def __str__(self) -> str:
        if self.names is ...:
            return "..."
        else:
            return "{{{}}}".format(", ".join(self.names))

    def __repr__(self) -> str:
        if self.names is ...:
            return "DatasetTypeRestriction(...)"
        else:
            return f"DatasetTypeRestriction({self.names!r})"

    @staticmethod
    def union(*args: DatasetTypeRestriction) -> DatasetTypeRestriction:
        """Merge one or more `DatasetTypeRestriction` instances, returning one
        that allows any of the dataset types included in any of them.

        Parameters
        ----------
        *args
            Positional arguments are `DatasetTypeRestriction` instances.
        """
        if any(a.names is ... for a in args):
            return DatasetTypeRestriction.any
        return DatasetTypeRestriction(frozenset.union(*tuple(a.names for a in args)))

    names: Union[FrozenSet[str], type(...)]
    """The names of the dataset types included (i.e. permitted) by the
    restriction, or the special value `...` to permit all dataset types
    (`frozenset` [ `str` ] or `...`).
    """


DatasetTypeRestriction.any = DatasetTypeRestriction(...)
"""A special `DatasetTypeRestriction` instance that permits any dataset type.

This instance should be preferred over constructing a new one with `...` when
possible, but it should not be assumed to be the only such instance (i.e.
don't use ``is`` instead of ``==`` for comparisons).
"""
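
# A hedged sketch of combining restrictions (the dataset type names are
# hypothetical):
#
#     >>> a = DatasetTypeRestriction.fromExpression(["flat", "bias"])
#     >>> b = DatasetTypeRestriction.fromExpression("dark")
#     >>> DatasetTypeRestriction.union(a, b).names == frozenset({"flat", "bias", "dark"})
#     True
#     >>> DatasetTypeRestriction.union(a, DatasetTypeRestriction.any) is DatasetTypeRestriction.any
#     True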


def _yieldCollectionRecords(
    manager: CollectionManager,
    record: CollectionRecord,
    restriction: DatasetTypeRestriction,
    datasetType: Optional[DatasetType] = None,
    collectionType: Optional[CollectionType] = None,
    withRestrictions: bool = False,
    done: Optional[Set[str]] = None,
    flattenChains: bool = True,
    includeChains: Optional[bool] = None,
) -> Iterator[CollectionRecord]:
    """A helper function containing common logic for `CollectionSearch.iter`
    and `CollectionQuery.iter`: yield a single `CollectionRecord` only if it
    matches the criteria given in the other arguments.

    Parameters
    ----------
    manager : `CollectionManager`
        Object responsible for managing the collection tables in a `Registry`.
    record : `CollectionRecord`
        Record to conditionally yield.
    restriction : `DatasetTypeRestriction`
        A restriction that must match ``datasetType`` (if given) in order to
        yield ``record``.
    datasetType : `DatasetType`, optional
        If given, a `DatasetType` instance that must be included in
        ``restriction`` in order to yield ``record``.
    collectionType : `CollectionType`, optional
        If given, a `CollectionType` enumeration value that must match
        ``record.type`` in order for ``record`` to be yielded.
    withRestrictions : `bool`, optional
        If `True` (`False` is default), yield ``restriction`` along with
        ``record``.
    done : `set` [ `str` ], optional
        A `set` of already-yielded collection names; if provided, ``record``
        will only be yielded if it is not already in ``done``, and ``done``
        will be updated to include it on return.
    flattenChains : `bool`, optional
        If `True` (default) recursively yield the child collections of
        `~CollectionType.CHAINED` collections.
    includeChains : `bool`, optional
        If `True`, yield records for `~CollectionType.CHAINED` collections
        themselves. The default is the opposite of ``flattenChains``: either
        return records for CHAINED collections or their children, but not
        both.

    Yields
    ------
    record : `CollectionRecord`
        The given collection record.
    restriction : `DatasetTypeRestriction`
        The given dataset type restriction; yielded only if
        ``withRestrictions`` is `True`.
    """
    includeChains = includeChains if includeChains is not None else not flattenChains
    if collectionType is None or record.type is collectionType:
        done.add(record.name)
        if record.type is not CollectionType.CHAINED or includeChains:
            if withRestrictions:
                yield record, restriction
            else:
                yield record
    if flattenChains and record.type is CollectionType.CHAINED:
        done.add(record.name)
        yield from record.children.iter(
            manager,
            datasetType=datasetType,
            collectionType=collectionType,
            withRestrictions=withRestrictions,
            done=done,
            flattenChains=flattenChains,
            includeChains=includeChains,
        )


class CollectionSearch:
    """An ordered search path of collections and dataset type restrictions.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).

    Parameters
    ----------
    items : `list` [ `tuple` [ `str`, `DatasetTypeRestriction` ] ]
        Tuples that relate a collection name to the restriction on dataset
        types to search for within it. This is not a mapping because the
        same collection name may appear multiple times with different
        restrictions.

    Notes
    -----
    A `CollectionSearch` is used to find a single dataset according to its
    dataset type and data ID, giving preference to collections in the order
    in which they are specified. A `CollectionQuery` can be constructed from
    a broader range of expressions but does not order the collections to be
    searched.

    `CollectionSearch` is iterable, yielding two-element tuples of `str`
    (collection name) and `DatasetTypeRestriction`.

    A `CollectionSearch` instance constructed properly (e.g. via
    `fromExpression`) is a unique representation of a particular search path;
    it is exactly the same internally and compares as equal to any other
    `CollectionSearch` constructed from an equivalent expression,
    regardless of how different the original expressions appear.
    """
    def __init__(self, items: List[Tuple[str, DatasetTypeRestriction]]):
        assert all(isinstance(v, DatasetTypeRestriction) for _, v in items)
        self._items = items

    __slots__ = ("_items",)

    @classmethod
    def fromExpression(cls, expression: Any) -> CollectionSearch:
        """Process a general expression to construct a `CollectionSearch`
        instance.

        Parameters
        ----------
        expression
            May be:
            - a `str` collection name;
            - a two-element `tuple` containing a `str` and any expression
              accepted by `DatasetTypeRestriction.fromExpression`;
            - any non-mapping iterable containing either of the above;
            - a mapping from `str` to any expression accepted by
              `DatasetTypeRestriction.fromExpression`;
            - another `CollectionSearch` instance (passed through
              unchanged).

            Multiple consecutive entries for the same collection with
            different restrictions will be merged. Non-consecutive entries
            will not be, because that actually represents a different search
            path.

        Returns
        -------
        collections : `CollectionSearch`
            A `CollectionSearch` instance.
        """
        # First see if this is already a CollectionSearch; just pass that
        # through unchanged. This lets us standardize expressions (and turn
        # single-pass iterators into multi-pass iterables) in advance and pass
        # them down to other routines that accept arbitrary expressions.
        if isinstance(expression, cls):
            return expression
        wildcard = CategorizedWildcard.fromExpression(expression,
                                                      allowAny=False,
                                                      allowPatterns=False,
                                                      coerceItemValue=DatasetTypeRestriction.fromExpression,
                                                      defaultItemValue=DatasetTypeRestriction.any)
        assert wildcard is not ...
        assert not wildcard.patterns
        assert not wildcard.strings
        return cls(
            # Consolidate repetitions of the same collection name.
            [(name, DatasetTypeRestriction.union(*tuple(item[1] for item in items)))
             for name, items in itertools.groupby(wildcard.items, key=operator.itemgetter(0))]
        )
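
    # A minimal sketch of the normalization performed here (collection and
    # dataset type names are hypothetical); consecutive entries for the same
    # collection are merged, and plain strings get an unrestricted search:
    #
    #     >>> CollectionSearch.fromExpression("calib") == CollectionSearch.fromExpression(["calib"])
    #     True
    #     >>> search = CollectionSearch.fromExpression(
    #     ...     [("calib", "flat"), ("calib", "bias"), "refcats"])
    #     >>> [name for name, _ in search]
    #     ['calib', 'refcats']
    #     >>> dict(search)["calib"].names == frozenset({"flat", "bias"})
    #     True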

    def iter(
        self, manager: CollectionManager, *,
        datasetType: Optional[DatasetType] = None,
        collectionType: Optional[CollectionType] = None,
        withRestrictions: bool = False,
        done: Optional[Set[str]] = None,
        flattenChains: bool = True,
        includeChains: Optional[bool] = None,
    ) -> Iterator[CollectionRecord]:
        """Iterate over collection records that match this instance and the
        given criteria, in order.

        This method is primarily intended for internal use by `Registry`;
        other callers should generally prefer `Registry.findDatasets` or
        other `Registry` query methods.

        Parameters
        ----------
        manager : `CollectionManager`
            Object responsible for managing the collection tables in a
            `Registry`.
        datasetType : `DatasetType`, optional
            If given, only yield collections whose dataset type restrictions
            include this dataset type.
        collectionType : `CollectionType`, optional
            If given, only yield collections of this type.
        withRestrictions : `bool`, optional
            If `True` (`False` is default) yield the associated
            `DatasetTypeRestriction` along with each `CollectionRecord`.
        done : `set`, optional
            A `set` containing the names of all collections already yielded;
            any collections whose names are already present in this set will
            not be yielded again, and those yielded will be added to it while
            iterating. If not provided, an empty `set` will be created and
            used internally to avoid duplicates.
        flattenChains : `bool`, optional
            If `True` (default) recursively yield the child collections of
            `~CollectionType.CHAINED` collections.
        includeChains : `bool`, optional
            If `True`, yield records for `~CollectionType.CHAINED`
            collections themselves. The default is the opposite of
            ``flattenChains``: either return records for CHAINED collections
            or their children, but not both.
        """
        if done is None:
            done = set()
        for name, restriction in self._items:
            if name not in done and (datasetType is None or datasetType in restriction):
                yield from _yieldCollectionRecords(
                    manager,
                    manager.find(name),
                    restriction,
                    datasetType=datasetType,
                    collectionType=collectionType,
                    withRestrictions=withRestrictions,
                    done=done,
                    flattenChains=flattenChains,
                    includeChains=includeChains,
                )

    def __iter__(self) -> Iterator[Tuple[str, DatasetTypeRestriction]]:
        yield from self._items

    def __len__(self) -> int:
        return len(self._items)

    def __eq__(self, other: CollectionSearch) -> bool:
        return self._items == other._items

    def __str__(self) -> str:
        return "[{}]".format(", ".join(f"{k}: {v}" for k, v in self._items))

    def __repr__(self) -> str:
        return f"CollectionSearch({self._items!r})"


class CollectionQuery:
    """An unordered query for collections and dataset type restrictions.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).

    Parameters
    ----------
    search : `CollectionSearch` or `...`
        An object representing an ordered search for explicitly-named
        collections (to be interpreted here as unordered), or the special
        value `...` indicating all collections. `...` must be accompanied
        by ``patterns=None``.
    patterns : `tuple` of `re.Pattern`
        Regular expression patterns to match against collection names.

    Notes
    -----
    A `CollectionQuery` is used to find all matching datasets in any number
    of collections, or to find collections themselves.

    `CollectionQuery` is expected to be rarely used outside of `Registry`
    (which uses it to back several of its "query" methods that take general
    expressions for collections), but it may occasionally be useful outside
    `Registry` as a way to preprocess expressions that contain single-pass
    iterators into a form that can be used to call those `Registry` methods
    multiple times.
    """
    def __init__(self, search: Union[CollectionSearch, type(...)],
                 patterns: Optional[Tuple[re.Pattern, ...]]):
        self._search = search
        self._patterns = patterns

    __slots__ = ("_search", "_patterns")

    @classmethod
    def fromExpression(cls, expression: Any) -> Union[CollectionQuery, type(...)]:
        """Process a general expression to construct a `CollectionQuery`
        instance.

        Parameters
        ----------
        expression
            May be:
            - a `str` collection name;
            - a two-element `tuple` containing a `str` and any expression
              accepted by `DatasetTypeRestriction.fromExpression`;
            - an `re.Pattern` instance to match (with `re.Pattern.fullmatch`)
              against collection names;
            - any non-mapping iterable containing any of the above;
            - a mapping from `str` to any expression accepted by
              `DatasetTypeRestriction.fromExpression`;
            - a `CollectionSearch` instance;
            - another `CollectionQuery` instance (passed through unchanged).

            Multiple consecutive entries for the same collection with
            different restrictions will be merged. Non-consecutive entries
            will not be, because that actually represents a different search
            path.

        Returns
        -------
        collections : `CollectionQuery`
            A `CollectionQuery` instance.
        """
        if isinstance(expression, cls):
            return expression
        if expression is ...:
            return cls.any
        if isinstance(expression, CollectionSearch):
            return cls(search=expression, patterns=())
        wildcard = CategorizedWildcard.fromExpression(expression,
                                                      allowAny=True,
                                                      allowPatterns=True,
                                                      coerceItemValue=DatasetTypeRestriction.fromExpression,
                                                      defaultItemValue=DatasetTypeRestriction.any)
        if wildcard is ...:
            return cls.any
        assert not wildcard.strings
        return cls(search=CollectionSearch.fromExpression(wildcard),
                   patterns=tuple(wildcard.patterns))
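
    # A hedged sketch of accepted expressions (the names and the pattern are
    # hypothetical); the resulting object is typically handed to `Registry`
    # query methods:
    #
    #     >>> import re
    #     >>> CollectionQuery.fromExpression(...) is CollectionQuery.any
    #     True
    #     >>> query = CollectionQuery.fromExpression(["calib", re.compile(r"u/.+/.+")])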

    def iter(
        self, manager: CollectionManager, *,
        datasetType: Optional[DatasetType] = None,
        collectionType: Optional[CollectionType] = None,
        withRestrictions: bool = False,
        flattenChains: bool = True,
        includeChains: Optional[bool] = None,
    ) -> Iterator[CollectionRecord]:
        """Iterate over collection records that match this instance and the
        given criteria, in an arbitrary order.

        This method is primarily intended for internal use by `Registry`;
        other callers should generally prefer `Registry.queryDatasets` or
        other `Registry` query methods.

        Parameters
        ----------
        manager : `CollectionManager`
            Object responsible for managing the collection tables in a
            `Registry`.
        datasetType : `DatasetType`, optional
            If given, only yield collections whose dataset type restrictions
            include this dataset type.
        collectionType : `CollectionType`, optional
            If given, only yield collections of this type.
        withRestrictions : `bool`, optional
            If `True` (`False` is default) yield the associated
            `DatasetTypeRestriction` along with each `CollectionRecord`.
        flattenChains : `bool`, optional
            If `True` (default) recursively yield the child collections of
            `~CollectionType.CHAINED` collections.
        includeChains : `bool`, optional
            If `True`, yield records for `~CollectionType.CHAINED`
            collections themselves. The default is the opposite of
            ``flattenChains``: either return records for CHAINED collections
            or their children, but not both.
        """
        if self._search is ...:
            yield from manager
        else:
            done = set()
            yield from self._search.iter(
                manager,
                datasetType=datasetType,
                collectionType=collectionType,
                withRestrictions=withRestrictions,
                done=done,
                flattenChains=flattenChains,
                includeChains=includeChains,
            )
            for record in manager:
                if record.name not in done and any(p.fullmatch(record.name) for p in self._patterns):
                    yield from _yieldCollectionRecords(
                        manager,
                        record,
                        DatasetTypeRestriction.any,
                        datasetType=datasetType,
                        collectionType=collectionType,
                        withRestrictions=withRestrictions,
                        done=done,
                        flattenChains=flattenChains,
                        includeChains=includeChains,
                    )


CollectionQuery.any = CollectionQuery(..., None)
"""A special `CollectionQuery` instance that matches any collection.

This instance should be preferred over constructing a new one with `...` when
possible, but it should not be assumed to be the only such instance.
"""