Coverage for python/lsst/daf/butler/registry/wildcards.py : 18%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ["CategorizedWildcard", "CollectionQuery", "CollectionSearch", "DatasetTypeRestriction"]


from dataclasses import dataclass
import itertools
import operator
import re
from typing import (
    Any,
    Callable,
    FrozenSet,
    Iterator,
    List,
    Optional,
    Set,
    Tuple,
    TYPE_CHECKING,
    Union,
)

import sqlalchemy

from ..core import DatasetType
from ..core.utils import iterable
from ._collectionType import CollectionType

if TYPE_CHECKING:
    from .interfaces import CollectionManager, CollectionRecord


@dataclass
class CategorizedWildcard:
    """The results of preprocessing a wildcard expression to separate match
    patterns from strings.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).
    """

    @classmethod
    def fromExpression(cls, expression: Any, *,
                       allowAny: bool = True,
                       allowPatterns: bool = True,
                       coerceUnrecognized: Optional[Callable[[Any], Union[Tuple[str, Any], str]]] = None,
                       coerceItemValue: Optional[Callable[[Any], Any]] = None,
                       defaultItemValue: Optional[Any] = None,
                       ) -> Union[CategorizedWildcard, type(...)]:
        """Categorize a wildcard expression.

        Parameters
        ----------
        expression
            The expression to categorize. May be any of:
            - `str`;
            - `re.Pattern` (only if ``allowPatterns`` is `True`);
            - objects recognized by ``coerceUnrecognized`` (if provided);
            - two-element tuples of (`str`, value) where value is recognized
              by ``coerceItemValue`` (if provided);
            - a non-`str`, non-mapping iterable containing any of the above;
            - the special value `...` (only if ``allowAny`` is `True`), which
              matches anything;
            - a mapping from `str` to values recognized by ``coerceItemValue``
              (if provided);
            - a `CategorizedWildcard` instance (passed through unchanged if
              it meets the requirements specified by keyword arguments).
        allowAny : `bool`, optional
            If `False` (`True` is default) raise `TypeError` if `...` is
            encountered.
        allowPatterns : `bool`, optional
            If `False` (`True` is default) raise `TypeError` if a `re.Pattern`
            is encountered, or if ``expression`` is a `CategorizedWildcard`
            with `patterns` not empty.
        coerceUnrecognized : `Callable`, optional
            A callback that takes a single argument of arbitrary type and
            returns either a `str` (appended to `strings`) or a `tuple` of
            (`str`, `Any`) (appended to `items`). This will be called on
            objects of unrecognized type, and its return value will be
            processed as above. Exceptions will be reraised as `TypeError`
            (and chained).
        coerceItemValue : `Callable`, optional
            If provided, ``expression`` may be a mapping from `str` to any
            type that can be passed to this function; the result of that call
            will be stored instead as the value in ``self.items``.
        defaultItemValue : `Any`, optional
            If provided, combine this value with any string values encountered
            (including any returned by ``coerceUnrecognized``) to form a
            `tuple` and add it to `items`, guaranteeing that `strings` will be
            empty. Patterns are never added to `items`.

        Returns
        -------
        categorized : `CategorizedWildcard` or ``...``
            The struct describing the wildcard. ``...`` is passed through
            unchanged.

        Raises
        ------
        TypeError
            Raised if an unsupported type is found in the expression.
        """
        assert expression is not None
        # See if we were given ...; just return that if we were.
        if expression is ...:
            if not allowAny:
                raise TypeError("This expression may not be unconstrained.")
            return ...
        if isinstance(expression, cls):
            # This is already a CategorizedWildcard. Make sure it meets the
            # reqs. implied by the kwargs we got.
            if not allowPatterns and expression.patterns:
                raise TypeError(f"Regular expression(s) {expression.patterns} "
                                f"are not allowed in this context.")
            if defaultItemValue is not None and expression.strings:
                if expression.items:
                    raise TypeError("Incompatible preprocessed expression: an ordered sequence of str is "
                                    "needed, but the original order was lost in the preprocessing.")
                return cls(strings=[], patterns=expression.patterns,
                           items=[(k, defaultItemValue) for k in expression.strings])
            elif defaultItemValue is None and expression.items:
                if expression.strings:
                    raise TypeError("Incompatible preprocessed expression: an ordered sequence of items is "
                                    "needed, but the original order was lost in the preprocessing.")
                return cls(strings=[k for k, _ in expression.items], patterns=expression.patterns, items=[])
            else:
                # Original expression was created with keyword arguments that
                # were at least as restrictive as what we just got; pass it
                # through.
                return expression

        # If we get here, we know we'll be creating a new instance.
        # Initialize an empty one now.
        self = cls(strings=[], patterns=[], items=[])

        # If mappings are allowed, see if we were given a single mapping by
        # trying to get items.
        if coerceItemValue is not None:
            rawItems = None
            try:
                rawItems = expression.items()
            except AttributeError:
                pass
            if rawItems is not None:
                for k, v in rawItems:
                    try:
                        self.items.append((k, coerceItemValue(v)))
                    except Exception as err:
                        raise TypeError(f"Could not coerce mapping value '{v}' for key '{k}'.") from err
                return self

        # Not ..., a CategorizedWildcard instance, or a mapping. Just
        # process scalars or an iterable. We put the body of the loop inside
        # a local function so we can recurse after coercion.

        def process(element: Any, alreadyCoerced: bool = False):
            if isinstance(element, str):
                if defaultItemValue is not None:
                    self.items.append((element, defaultItemValue))
                else:
                    self.strings.append(element)
                return
            if allowPatterns and isinstance(element, re.Pattern):
                self.patterns.append(element)
                return
            if coerceItemValue is not None:
                try:
                    k, v = element
                except TypeError:
                    pass
                else:
                    if not alreadyCoerced:
                        if not isinstance(k, str):
                            raise TypeError(f"Item key '{k}' is not a string.")
                        try:
                            v = coerceItemValue(v)
                        except Exception as err:
                            raise TypeError(f"Could not coerce tuple item value '{v}' for key '{k}'."
                                            ) from err
                    self.items.append((k, v))
                    return
            if alreadyCoerced:
                raise TypeError(f"Object '{element}' returned by coercion function is still unrecognized.")
            if coerceUnrecognized is not None:
                try:
                    process(coerceUnrecognized(element), alreadyCoerced=True)
                except Exception as err:
                    raise TypeError(f"Could not coerce expression element '{element}'.") from err
            else:
                raise TypeError(f"Unsupported object in wildcard expression: '{element}'.")

        for element in iterable(expression):
            process(element)
        return self

    def makeWhereExpression(self, column: sqlalchemy.sql.ColumnElement
                            ) -> Optional[sqlalchemy.sql.ColumnElement]:
        """Transform the wildcard into a SQLAlchemy boolean expression suitable
        for use in a WHERE clause.

        Parameters
        ----------
        column : `sqlalchemy.sql.ColumnElement`
            A string column in a table or query that should be compared to the
            wildcard expression.

        Returns
        -------
        where : `sqlalchemy.sql.ColumnElement` or `None`
            A boolean SQL expression that evaluates to true if and only if
            the value of ``column`` matches the wildcard. `None` is returned
            if both `strings` and `patterns` are empty, and hence no match is
            possible.
        """
        if self.items:
            raise NotImplementedError("Expressions that are processed into items cannot be transformed "
                                      "automatically into queries.")
        if self.patterns:
            raise NotImplementedError("Regular expression patterns are not yet supported here.")
        terms = []
        if len(self.strings) == 1:
            terms.append(column == self.strings[0])
        elif len(self.strings) > 1:
            terms.append(column.in_(self.strings))
        # TODO: append terms for regular expressions
        if not terms:
            return None
        return sqlalchemy.sql.or_(*terms)
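
    # Illustrative note (assumption about the rendered SQL, not from the
    # original source): a wildcard built from a single string yields roughly
    # ``column = 'value'``; several strings yield ``column IN ('a', 'b', ...)``;
    # wildcards carrying items or patterns are rejected above with
    # NotImplementedError.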

    strings: List[str]
    """Explicit string values found in the wildcard (`list` [ `str` ]).
    """

    patterns: List[re.Pattern]
    """Regular expression patterns found in the wildcard
    (`list` [ `re.Pattern` ]).
    """

    items: List[Tuple[str, Any]]
    """Two-item tuples that relate string values to other objects
    (`list` [ `tuple` [ `str`, `Any` ] ]).
    """
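

# Illustrative sketch: a minimal demonstration of how
# `CategorizedWildcard.fromExpression` separates strings from compiled patterns
# and how a string-only wildcard becomes a WHERE clause.  The function name and
# literal values below are hypothetical, not part of the library API.
def _demo_categorized_wildcard() -> None:
    # Strings and compiled patterns end up in ``strings`` and ``patterns``.
    wildcard = CategorizedWildcard.fromExpression(["calib", re.compile(r"run/\d+")])
    assert wildcard.strings == ["calib"]
    assert len(wildcard.patterns) == 1
    # ``...`` is passed through unchanged (it matches anything).
    assert CategorizedWildcard.fromExpression(...) is ...
    # A string-only wildcard can be turned into a SQLAlchemy boolean
    # expression, here roughly ``name = 'calib'``.
    column = sqlalchemy.Column("name", sqlalchemy.String)
    assert CategorizedWildcard.fromExpression("calib").makeWhereExpression(column) is not None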


class DatasetTypeRestriction:
    """An immutable set-like object that represents a restriction on the
    dataset types to search for within a collection.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).

    Parameters
    ----------
    names : `frozenset` [`str`] or `...`
        The names of the dataset types included in the restriction, or `...`
        to permit a search for any dataset type.

    Notes
    -----
    This class does not inherit from `collections.abc.Set` (and does not
    implement the full set interface) because it is not always iterable and
    sometimes has no length (i.e. when ``names`` is ``...``).
    """
    def __init__(self, names: Union[FrozenSet[str], type(...)]):
        self.names = names

    __slots__ = ("names",)

    @classmethod
    def fromExpression(cls, expression: Any) -> DatasetTypeRestriction:
        """Process a general expression to construct a `DatasetTypeRestriction`
        instance.

        Parameters
        ----------
        expression
            May be:
            - a `DatasetType` instance;
            - a `str` dataset type name;
            - any non-mapping iterable containing either of the above;
            - the special value `...`;
            - another `DatasetTypeRestriction` instance (passed through
              unchanged).

        Returns
        -------
        restriction : `DatasetTypeRestriction`
            A `DatasetTypeRestriction` instance.
        """
        if isinstance(expression, cls):
            return expression
        wildcard = CategorizedWildcard.fromExpression(expression, allowPatterns=False,
                                                      coerceUnrecognized=lambda d: d.name)
        if wildcard is ...:
            return cls.any
        else:
            return cls(frozenset(wildcard.strings))

    def __contains__(self, datasetType: DatasetType) -> bool:
        return (self.names is ... or datasetType.name in self.names
                or (datasetType.isComponent()
                    and DatasetType.splitDatasetTypeName(datasetType.name)[0] in self.names))

    def __eq__(self, other: DatasetTypeRestriction) -> bool:
        return self.names == other.names

    def __str__(self) -> str:
        if self.names is ...:
            return "..."
        else:
            return "{{{}}}".format(", ".join(self.names))

    def __repr__(self) -> str:
        if self.names is ...:
            return "DatasetTypeRestriction(...)"
        else:
            return f"DatasetTypeRestriction({self.names!r})"

    @staticmethod
    def union(*args: DatasetTypeRestriction) -> DatasetTypeRestriction:
        """Merge one or more `DatasetTypeRestriction` instances, returning one
        that allows any of the dataset types included in any of them.

        Parameters
        ----------
        args
            Positional arguments are `DatasetTypeRestriction` instances.
        """
        if any(a.names is ... for a in args):
            return DatasetTypeRestriction.any
        return DatasetTypeRestriction(frozenset.union(*tuple(a.names for a in args)))

    names: Union[FrozenSet[str], type(...)]
    """The names of the dataset types included (i.e. permitted) by the
    restriction, or the special value `...` to permit all dataset types
    (`frozenset` [ `str` ] or `...`).
    """


DatasetTypeRestriction.any = DatasetTypeRestriction(...)
"""A special `DatasetTypeRestriction` instance that permits any dataset type.

This instance should be preferred instead of constructing a new one with `...`,
when possible, but it should not be assumed to be the only such instance (i.e.
don't use ``is`` instead of ``==`` for comparisons).
"""
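

# Illustrative sketch: how restrictions are built from expressions, compared,
# and merged.  The function name and the dataset type names below are
# hypothetical, not part of the library API.
def _demo_dataset_type_restriction() -> None:
    a = DatasetTypeRestriction.fromExpression(["flat", "bias"])
    b = DatasetTypeRestriction.fromExpression("dark")
    # Equivalent expressions produce equal restrictions.
    assert a == DatasetTypeRestriction.fromExpression(("bias", "flat"))
    # The union permits anything permitted by any argument; if any argument is
    # unrestricted, the result is unrestricted too.
    assert DatasetTypeRestriction.union(a, b).names == frozenset({"flat", "bias", "dark"})
    assert DatasetTypeRestriction.union(a, DatasetTypeRestriction.any).names is ...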


def _yieldCollectionRecords(
    manager: CollectionManager,
    record: CollectionRecord,
    restriction: DatasetTypeRestriction,
    datasetType: Optional[DatasetType] = None,
    collectionType: Optional[CollectionType] = None,
    withRestrictions: bool = False,
    done: Optional[Set[str]] = None,
    flattenChains: bool = True,
    includeChains: Optional[bool] = None,
) -> Iterator[CollectionRecord]:
    """A helper function containing common logic for `CollectionSearch.iter`
    and `CollectionQuery.iter`: yield a single `CollectionRecord` only if it
    matches the criteria given in other arguments.

    Parameters
    ----------
    manager : `CollectionManager`
        Object responsible for managing the collection tables in a `Registry`.
    record : `CollectionRecord`
        Record to conditionally yield.
    restriction : `DatasetTypeRestriction`
        A restriction that must match ``datasetType`` (if given) in order to
        yield ``record``.
    datasetType : `DatasetType`, optional
        If given, a `DatasetType` instance that must be included in
        ``restriction`` in order to yield ``record``.
    collectionType : `CollectionType`, optional
        If given, a `CollectionType` enumeration value that must match
        ``record.type`` in order for ``record`` to be yielded.
    withRestrictions : `bool`, optional
        If `True` (`False` is default), yield ``restriction`` along with
        ``record``.
    done : `set` [ `str` ], optional
        A `set` of already-yielded collection names; if provided, ``record``
        will only be yielded if it is not already in ``done``, and ``done``
        will be updated to include it on return.
    flattenChains : `bool`, optional
        If `True` (default) recursively yield the child collections of
        `~CollectionType.CHAINED` collections.
    includeChains : `bool`, optional
        If `True`, yield records for `~CollectionType.CHAINED` collections
        themselves. The default is the opposite of ``flattenChains``: either
        return records for CHAINED collections or their children, but not both.

    Yields
    ------
    record : `CollectionRecord`
        The given collection record.
    restriction : `DatasetTypeRestriction`
        The given dataset type restriction; yielded only if
        ``withRestrictions`` is `True`.
    """
    if done is None:
        done = set()
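    # By default, a CHAINED collection is either yielded itself or flattened
    # into its children, but not both; passing flattenChains=True together
    # with an explicit includeChains=True yields both.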
    includeChains = includeChains if includeChains is not None else not flattenChains
    if collectionType is None or record.type is collectionType:
        done.add(record.name)
        if record.type is not CollectionType.CHAINED or includeChains:
            if withRestrictions:
                yield record, restriction
            else:
                yield record
        if flattenChains and record.type is CollectionType.CHAINED:
            done.add(record.name)
            yield from record.children.iter(
                manager,
                datasetType=datasetType,
                collectionType=collectionType,
                withRestrictions=withRestrictions,
                done=done,
                flattenChains=flattenChains,
                includeChains=includeChains,
            )


class CollectionSearch:
    """An ordered search path of collections and dataset type restrictions.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).

    Parameters
    ----------
    items : `list` [ `tuple` [ `str`, `DatasetTypeRestriction` ] ]
        Tuples that relate a collection name to the restriction on dataset
        types to search for within it. This is not a mapping because the
        same collection name may appear multiple times with different
        restrictions.

    Notes
    -----
    A `CollectionSearch` is used to find a single dataset according to its
    dataset type and data ID, giving preference to collections in the order
    in which they are specified. A `CollectionQuery` can be constructed from
    a broader range of expressions but does not order the collections to be
    searched.

    `CollectionSearch` is iterable, yielding two-element tuples of `str`
    (collection name) and `DatasetTypeRestriction`.

    A `CollectionSearch` instance constructed properly (e.g. via
    `fromExpression`) is a unique representation of a particular search path;
    it is exactly the same internally and compares as equal to any
    `CollectionSearch` constructed from an equivalent expression,
    regardless of how different the original expressions appear.
    """
    def __init__(self, items: List[Tuple[str, DatasetTypeRestriction]]):
        assert all(isinstance(v, DatasetTypeRestriction) for _, v in items)
        self._items = items

    __slots__ = ("_items",)

    @classmethod
    def fromExpression(cls, expression: Any) -> CollectionSearch:
        """Process a general expression to construct a `CollectionSearch`
        instance.

        Parameters
        ----------
        expression
            May be:
            - a `str` collection name;
            - a two-element `tuple` containing a `str` and any expression
              accepted by `DatasetTypeRestriction.fromExpression`;
            - any non-mapping iterable containing either of the above;
            - a mapping from `str` to any expression accepted by
              `DatasetTypeRestriction.fromExpression`;
            - another `CollectionSearch` instance (passed through
              unchanged).

        Multiple consecutive entries for the same collection with different
        restrictions will be merged. Non-consecutive entries will not,
        because that actually represents a different search path.

        Returns
        -------
        collections : `CollectionSearch`
            A `CollectionSearch` instance.
        """
        # First see if this is already a CollectionSearch; just pass that
        # through unchanged. This lets us standardize expressions (and turn
        # single-pass iterators into multi-pass iterables) in advance and pass
        # them down to other routines that accept arbitrary expressions.
        if isinstance(expression, cls):
            return expression
        wildcard = CategorizedWildcard.fromExpression(expression,
                                                      allowAny=False,
                                                      allowPatterns=False,
                                                      coerceItemValue=DatasetTypeRestriction.fromExpression,
                                                      defaultItemValue=DatasetTypeRestriction.any)
        assert wildcard is not ...
        assert not wildcard.patterns
        assert not wildcard.strings
        return cls(
            # Consolidate repetitions of the same collection name.
            [(name, DatasetTypeRestriction.union(*tuple(item[1] for item in items)))
             for name, items in itertools.groupby(wildcard.items, key=operator.itemgetter(0))]
        )

    def iter(
        self, manager: CollectionManager, *,
        datasetType: Optional[DatasetType] = None,
        collectionType: Optional[CollectionType] = None,
        withRestrictions: bool = False,
        done: Optional[Set[str]] = None,
        flattenChains: bool = True,
        includeChains: Optional[bool] = None,
    ) -> Iterator[CollectionRecord]:
        """Iterate over collection records that match this instance and the
        given criteria, in order.

        This method is primarily intended for internal use by `Registry`;
        other callers should generally prefer `Registry.findDatasets` or
        other `Registry` query methods.

        Parameters
        ----------
        manager : `CollectionManager`
            Object responsible for managing the collection tables in a
            `Registry`.
        datasetType : `DatasetType`, optional
            If given, only yield collections whose dataset type restrictions
            include this dataset type.
        collectionType : `CollectionType`, optional
            If given, only yield collections of this type.
        withRestrictions : `bool`, optional
            If `True` (`False` is default) yield the associated
            `DatasetTypeRestriction` along with each `CollectionRecord`.
        done : `set`, optional
            A `set` containing the names of all collections already yielded;
            any collections whose names are already present in this set will
            not be yielded again, and those yielded will be added to it while
            iterating. If not provided, an empty `set` will be created and
            used internally to avoid duplicates.
        flattenChains : `bool`, optional
            If `True` (default) recursively yield the child collections of
            `~CollectionType.CHAINED` collections.
        includeChains : `bool`, optional
            If `True`, yield records for `~CollectionType.CHAINED`
            collections themselves. The default is the opposite of
            ``flattenChains``: either return records for CHAINED collections or
            their children, but not both.
        """
        if done is None:
            done = set()
        for name, restriction in self._items:
            if name not in done and (datasetType is None or datasetType in restriction):
                yield from _yieldCollectionRecords(
                    manager,
                    manager.find(name),
                    restriction,
                    datasetType=datasetType,
                    collectionType=collectionType,
                    withRestrictions=withRestrictions,
                    done=done,
                    flattenChains=flattenChains,
                    includeChains=includeChains,
                )

    def __iter__(self) -> Iterator[Tuple[str, DatasetTypeRestriction]]:
        yield from self._items

    def __len__(self) -> int:
        return len(self._items)

    def __eq__(self, other: CollectionSearch) -> bool:
        return self._items == other._items

    def __str__(self) -> str:
        return "[{}]".format(", ".join(f"{k}: {v}" for k, v in self._items))

    def __repr__(self) -> str:
        return f"CollectionSearch({self._items!r})"


class CollectionQuery:
    """An unordered query for collections and dataset type restrictions.

    The `fromExpression` method should almost always be used to construct
    instances, as the regular constructor performs no checking of inputs (and
    that can lead to confusing error messages downstream).

    Parameters
    ----------
    search : `CollectionSearch` or `...`
        An object representing an ordered search for explicitly-named
        collections (to be interpreted here as unordered), or the special
        value `...` indicating all collections. `...` must be accompanied
        by ``patterns=None``.
    patterns : `tuple` of `re.Pattern`, or `None`
        Regular expression patterns to match against collection names.

    Notes
    -----
    A `CollectionQuery` is used to find all matching datasets in any number
    of collections, or to find collections themselves.

    `CollectionQuery` is expected to be rarely used outside of `Registry`
    (which uses it to back several of its "query" methods that take general
    expressions for collections), but it may occasionally be useful outside
    `Registry` as a way to preprocess expressions that contain single-pass
    iterators into a form that can be used to call those `Registry` methods
    multiple times.
    """
    def __init__(self, search: Union[CollectionSearch, type(...)],
                 patterns: Optional[Tuple[re.Pattern, ...]]):
        self._search = search
        self._patterns = patterns

    __slots__ = ("_search", "_patterns")

    @classmethod
    def fromExpression(cls, expression: Any) -> CollectionQuery:
        """Process a general expression to construct a `CollectionQuery`
        instance.

        Parameters
        ----------
        expression
            May be:
            - a `str` collection name;
            - a two-element `tuple` containing a `str` and any expression
              accepted by `DatasetTypeRestriction.fromExpression`;
            - an `re.Pattern` instance to match (with `re.Pattern.fullmatch`)
              against collection names;
            - any non-mapping iterable containing any of the above;
            - a mapping from `str` to any expression accepted by
              `DatasetTypeRestriction.fromExpression`;
            - a `CollectionSearch` instance;
            - another `CollectionQuery` instance (passed through unchanged).

        Multiple consecutive entries for the same collection with different
        restrictions will be merged. Non-consecutive entries will not,
        because that actually represents a different search path.

        Returns
        -------
        collections : `CollectionQuery`
            A `CollectionQuery` instance.
        """
        if isinstance(expression, cls):
            return expression
        if expression is ...:
            return cls.any
        if isinstance(expression, CollectionSearch):
            return cls(search=expression, patterns=())
        wildcard = CategorizedWildcard.fromExpression(expression,
                                                      allowAny=True,
                                                      allowPatterns=True,
                                                      coerceItemValue=DatasetTypeRestriction.fromExpression,
                                                      defaultItemValue=DatasetTypeRestriction.any)
        if wildcard is ...:
            return cls.any
        assert not wildcard.strings
        # Pass only the (name, restriction) items to CollectionSearch; the
        # patterns are kept separately, as CollectionSearch does not accept
        # them.
        return cls(search=CollectionSearch.fromExpression(wildcard.items),
                   patterns=tuple(wildcard.patterns))

    def iter(
        self, manager: CollectionManager, *,
        datasetType: Optional[DatasetType] = None,
        collectionType: Optional[CollectionType] = None,
        withRestrictions: bool = False,
        flattenChains: bool = True,
        includeChains: Optional[bool] = None,
    ) -> Iterator[CollectionRecord]:
        """Iterate over collection records that match this instance and the
        given criteria, in an arbitrary order.

        This method is primarily intended for internal use by `Registry`;
        other callers should generally prefer `Registry.queryDatasets` or
        other `Registry` query methods.

        Parameters
        ----------
        manager : `CollectionManager`
            Object responsible for managing the collection tables in a
            `Registry`.
        datasetType : `DatasetType`, optional
            If given, only yield collections whose dataset type restrictions
            include this dataset type.
        collectionType : `CollectionType`, optional
            If given, only yield collections of this type.
        withRestrictions : `bool`, optional
            If `True` (`False` is default) yield the associated
            `DatasetTypeRestriction` along with each `CollectionRecord`.
        flattenChains : `bool`, optional
            If `True` (default) recursively yield the child collections of
            `~CollectionType.CHAINED` collections.
        includeChains : `bool`, optional
            If `True`, yield records for `~CollectionType.CHAINED`
            collections themselves. The default is the opposite of
            ``flattenChains``: either return records for CHAINED collections or
            their children, but not both.
        """
        if self._search is ...:
            for record in manager:
                yield from _yieldCollectionRecords(
                    manager,
                    record,
                    DatasetTypeRestriction.any,
                    datasetType=datasetType,
                    collectionType=collectionType,
                    withRestrictions=withRestrictions,
                    flattenChains=flattenChains,
                    includeChains=includeChains,
                )
        else:
            done = set()
            yield from self._search.iter(
                manager,
                datasetType=datasetType,
                collectionType=collectionType,
                withRestrictions=withRestrictions,
                done=done,
                flattenChains=flattenChains,
                includeChains=includeChains,
            )
            for record in manager:
                if record.name not in done and any(p.fullmatch(record.name) for p in self._patterns):
                    yield from _yieldCollectionRecords(
                        manager,
                        record,
                        DatasetTypeRestriction.any,
                        datasetType=datasetType,
                        collectionType=collectionType,
                        withRestrictions=withRestrictions,
                        done=done,
                        flattenChains=flattenChains,
                        includeChains=includeChains,
                    )


CollectionQuery.any = CollectionQuery(..., None)
"""A special `CollectionQuery` instance that matches any collection.

This instance should be preferred instead of constructing a new one with `...`,
when possible, but it should not be assumed to be the only such instance.
"""
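

# Illustrative sketch: how general collection expressions normalize into a
# CollectionQuery.  The function name and the collection names/patterns below
# are hypothetical, not part of the library API.
def _demo_collection_query() -> None:
    # ``...`` (match everything) normalizes to the shared CollectionQuery.any.
    assert CollectionQuery.fromExpression(...) is CollectionQuery.any
    # Explicit names are folded into an internal CollectionSearch; regular
    # expressions are kept as patterns to be matched against collection names
    # during iteration.
    assert isinstance(CollectionQuery.fromExpression("calib"), CollectionQuery)
    assert isinstance(CollectionQuery.fromExpression([re.compile(r"run/\d+"), "calib"]), CollectionQuery)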