Coverage for python/lsst/daf/butler/core/constraints.py: 30%
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

"""Code relating to constraints based on `DatasetRef`, `DatasetType`, or
`StorageClass`."""

__all__ = ("Constraints", "ConstraintsValidationError", "ConstraintsConfig")

from typing import (
    TYPE_CHECKING,
    Optional,
    Set,
    Union,
)

import logging
from .config import Config
from .configSupport import LookupKey, processLookupConfigList
from .exceptions import ValidationError

if TYPE_CHECKING:
    from .dimensions import DimensionUniverse
    from .storageClass import StorageClass
    from .datasets import DatasetRef, DatasetType

log = logging.getLogger(__name__)


class ConstraintsValidationError(ValidationError):
    """Thrown when a constraints list has mutually exclusive definitions."""
    pass


class ConstraintsConfig(Config):
    """Configuration information for `Constraints`."""
    pass


class Constraints:
    """Determine whether an entity is allowed to be handled.

    Supported entities are `DatasetRef`, `DatasetType`, or `StorageClass`.

    Parameters
    ----------
    config : `ConstraintsConfig`, `str`, or `None`
        Configuration to load. If `None` this is equivalent to having
        no restrictions.
    universe : `DimensionUniverse`
        The set of all known dimensions, used to normalize any lookup keys
        involving dimensions.
    """

    matchAllKey = LookupKey("all")
    """Configuration key associated with matching everything."""

    def __init__(self, config: Optional[Union[ConstraintsConfig, str]], *, universe: DimensionUniverse):
        # Default is to accept all and reject nothing
        self._accept = set()
        self._reject = set()

        if config is not None:
            self.config = ConstraintsConfig(config)

            if "accept" in self.config:
                self._accept = processLookupConfigList(self.config["accept"], universe=universe)
            if "reject" in self.config:
                self._reject = processLookupConfigList(self.config["reject"], universe=universe)

        if self.matchAllKey in self._accept and self.matchAllKey in self._reject:
            raise ConstraintsValidationError("Can not explicitly accept 'all' and reject 'all'"
                                             " in one configuration")

    def __str__(self) -> str:
        # Standard stringification
        if not self._accept and not self._reject:
            return "Accepts: all"

        accepts = ", ".join(str(k) for k in self._accept)
        rejects = ", ".join(str(k) for k in self._reject)
        return f"Accepts: {accepts}; Rejects: {rejects}"

    def isAcceptable(self, entity: Union[DatasetRef, DatasetType, StorageClass]) -> bool:
        """Check whether the supplied entity will be acceptable.

        Parameters
        ----------
        entity : `DatasetType`, `DatasetRef`, or `StorageClass`
            Instance to use to look in constraints table.
            The entity itself reports the `LookupKey` that is relevant.

        Returns
        -------
        allowed : `bool`
            `True` if the entity is allowed.
        """
        # Get the names to use for lookup
        names = set(entity._lookupNames())

        # Test if this entity is explicitly mentioned for accept/reject
        isExplicitlyAccepted = bool(names & self._accept)

        if isExplicitlyAccepted:
            return True

        isExplicitlyRejected = bool(names & self._reject)

        if isExplicitlyRejected:
            return False

        # Now look for wildcard match -- we have to also check for dataId
        # overrides

        # Generate a new set of lookup keys that use the wildcard name
        # but the supplied dimensions
        wildcards = {k.clone(name=self.matchAllKey.name) for k in names}

        isWildcardAccepted = bool(wildcards & self._accept)
        isWildcardRejected = bool(wildcards & self._reject)

        if isWildcardRejected:
            return False

        # If all the wildcard and explicit rejections have failed then
        # if the accept list is empty, or if a wildcard acceptance worked
        # we can accept, else reject
        if isWildcardAccepted or not self._accept:
            return True

        return False

    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the lookup keys for all the constraints entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for determining constraints. Does not include
            the special "all" lookup key.
        """
        # Combine the accept and reject entries, then drop the wildcard key.
        all = self._accept | self._reject
        return set(a for a in all if a.name != self.matchAllKey.name)
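
A minimal usage sketch follows (not part of the covered module), tying together the accept/reject precedence implemented in isAcceptable above. The import paths follow the file path in the page header and the relative imports in the source; the direct DimensionUniverse() and StorageClass(name) construction is an assumption and may differ between daf_butler versions.

# Illustrative sketch only -- not part of constraints.py. Constructor usage for
# DimensionUniverse and StorageClass is assumed here.
from lsst.daf.butler.core.constraints import (
    Constraints,
    ConstraintsConfig,
    ConstraintsValidationError,
)
from lsst.daf.butler.core.dimensions import DimensionUniverse
from lsst.daf.butler.core.storageClass import StorageClass

universe = DimensionUniverse()

# No configuration: equivalent to accepting everything.
unrestricted = Constraints(None, universe=universe)
print(unrestricted.isAcceptable(StorageClass("StructuredDataDict")))  # expected: True

# Reject one storage class by name; with an empty accept list everything
# else falls through to the final "accept" branch of isAcceptable.
constraints = Constraints(ConstraintsConfig({"reject": ["StructuredDataDict"]}),
                          universe=universe)
print(constraints)  # e.g. "Accepts: ; Rejects: StructuredDataDict"
print(constraints.isAcceptable(StorageClass("StructuredDataDict")))  # expected: False
print(constraints.isAcceptable(StorageClass("ExposureF")))           # expected: True

# Accepting "all" while also rejecting "all" is contradictory and should raise
# ConstraintsValidationError from __init__.
try:
    Constraints(ConstraintsConfig({"accept": ["all"], "reject": ["all"]}),
                universe=universe)
except ConstraintsValidationError as err:
    print("rejected contradictory config:", err)

# getLookupKeys returns the configured keys minus the special "all" wildcard.
print(constraints.getLookupKeys())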