Coverage for python/lsst/daf/butler/core/constraints.py: 26%
52 statements
coverage.py v7.2.5, created at 2023-05-09 02:11 -0700
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

"""Code relating to constraints based on `DatasetRef`, `DatasetType`, or
`StorageClass`."""

__all__ = ("Constraints", "ConstraintsValidationError", "ConstraintsConfig")

import logging
from typing import TYPE_CHECKING, Optional, Set, Union

from .config import Config
from .configSupport import LookupKey, processLookupConfigList
from .exceptions import ValidationError

if TYPE_CHECKING:
    from .datasets import DatasetRef, DatasetType
    from .dimensions import DimensionUniverse
    from .storageClass import StorageClass

log = logging.getLogger(__name__)
class ConstraintsValidationError(ValidationError):
    """Thrown when a constraints list has mutually exclusive definitions."""

    pass


class ConstraintsConfig(Config):
    """Configuration information for `Constraints`."""

    pass
class Constraints:
    """Determine whether an entity is allowed to be handled.

    Supported entities are `DatasetRef`, `DatasetType`, or `StorageClass`.

    Parameters
    ----------
    config : `ConstraintsConfig`, `str`, or `None`
        Load configuration. If `None` then this is equivalent to having
        no restrictions.
    universe : `DimensionUniverse`
        The set of all known dimensions, used to normalize any lookup keys
        involving dimensions.
    """

    matchAllKey = LookupKey("all")
    """Configuration key associated with matching everything."""
    def __init__(self, config: Optional[Union[ConstraintsConfig, str]], *, universe: DimensionUniverse):
        # Default is to accept all and reject nothing
        self._accept = set()
        self._reject = set()

        if config is not None:
            self.config = ConstraintsConfig(config)

            if "accept" in self.config:
                self._accept = processLookupConfigList(self.config["accept"], universe=universe)
            if "reject" in self.config:
                self._reject = processLookupConfigList(self.config["reject"], universe=universe)

        if self.matchAllKey in self._accept and self.matchAllKey in self._reject:
            raise ConstraintsValidationError(
                "Can not explicitly accept 'all' and reject 'all' in one configuration"
            )
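    # Note: the following is an illustrative sketch, not part of the original
    # module. Assuming a hypothetical dataset-type name "raw", a
    # rejection-only configuration could be built like this:
    #
    #     config = ConstraintsConfig({"reject": ["raw"]})
    #     constraints = Constraints(config, universe=universe)
    #
    # A configuration listing "all" under both "accept" and "reject" would
    # raise ConstraintsValidationError in the check above.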
    def __str__(self) -> str:
        # Standard stringification
        if not self._accept and not self._reject:
            return "Accepts: all"

        accepts = ", ".join(str(k) for k in self._accept)
        rejects = ", ".join(str(k) for k in self._reject)
        return f"Accepts: {accepts}; Rejects: {rejects}"
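    # Illustrative output of the stringification above (hypothetical
    # "calexp"/"raw" entries): "Accepts: calexp; Rejects: raw". With no
    # configuration at all it reports "Accepts: all".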
    def isAcceptable(self, entity: Union[DatasetRef, DatasetType, StorageClass]) -> bool:
        """Check whether the supplied entity will be acceptable.

        Parameters
        ----------
        entity : `DatasetType`, `DatasetRef`, or `StorageClass`
            Instance to use to look in constraints table.
            The entity itself reports the `LookupKey` that is relevant.

        Returns
        -------
        allowed : `bool`
            `True` if the entity is allowed.
        """
        # Get the names to use for lookup
        names = set(entity._lookupNames())

        # Test if this entity is explicitly mentioned for accept/reject
        isExplicitlyAccepted = bool(names & self._accept)

        if isExplicitlyAccepted:
            return True

        isExplicitlyRejected = bool(names & self._reject)

        if isExplicitlyRejected:
            return False

        # Now look for a wildcard match -- we have to also check for dataId
        # overrides

        # Generate a new set of lookup keys that use the wildcard name
        # but the supplied dimensions
        wildcards = {k.clone(name=self.matchAllKey.name) for k in names}

        isWildcardAccepted = bool(wildcards & self._accept)
        isWildcardRejected = bool(wildcards & self._reject)

        if isWildcardRejected:
            return False

        # If neither an explicit nor a wildcard rejection matched, accept
        # when a wildcard acceptance matched or the accept list is empty;
        # otherwise reject.
        if isWildcardAccepted or not self._accept:
            return True

        return False
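    # Sketch of the decision order above with hypothetical entries
    # accept = {"calexp"} and reject = {"all"}:
    #
    #     constraints.isAcceptable(calexp_type)  # True: explicit accept
    #     constraints.isAcceptable(other_type)   # False: wildcard reject
    #
    # Explicit accept/reject matches always take precedence over the "all"
    # wildcard.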
    def getLookupKeys(self) -> Set[LookupKey]:
        """Retrieve the lookup keys for all the constraints entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for determining constraints. Does not include
            the special "all" lookup key.
        """
        all = self._accept | self._reject
        return {a for a in all if a.name != self.matchAllKey.name}
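# A minimal sketch of the key reporting above (hypothetical entries): with
# accept = {LookupKey("calexp")} and reject = {LookupKey("all")},
# getLookupKeys() returns {LookupKey("calexp")} -- the special "all" key is
# filtered out.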