Coverage for python/lsst/daf/butler/datastore/constraints.py: 26%
52 statements
« prev ^ index » next coverage.py v7.4.4, created at 2024-04-10 10:14 +0000
« prev ^ index » next coverage.py v7.4.4, created at 2024-04-10 10:14 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28"""Code relating to constraints based on `DatasetRef`, `DatasetType`, or
29`StorageClass`.
30"""
32from __future__ import annotations
34__all__ = ("Constraints", "ConstraintsValidationError", "ConstraintsConfig")
36import logging
37from typing import TYPE_CHECKING
39from .._config import Config
40from .._config_support import LookupKey, processLookupConfigList
41from .._exceptions import ValidationError
43if TYPE_CHECKING:
44 from .._dataset_ref import DatasetRef
45 from .._dataset_type import DatasetType
46 from .._storage_class import StorageClass
47 from ..dimensions import DimensionUniverse
49log = logging.getLogger(__name__)
class ConstraintsValidationError(ValidationError):
    """Exception raised when a constraints configuration contains mutually
    exclusive definitions (e.g. both accepting and rejecting everything).
    """
class ConstraintsConfig(Config):
    """Configuration class holding the accept/reject settings used by
    `Constraints`.
    """
class Constraints:
    """Determine whether an entity is allowed to be handled.

    Supported entities are `DatasetRef`, `DatasetType`, or `StorageClass`.

    Parameters
    ----------
    config : `ConstraintsConfig` or `str`
        Load configuration. If `None` then this is equivalent to having
        no restrictions.
    universe : `DimensionUniverse`
        The set of all known dimensions, used to normalize any lookup keys
        involving dimensions.

    Raises
    ------
    ConstraintsValidationError
        Raised if the configuration explicitly accepts "all" and also
        explicitly rejects "all".
    """

    matchAllKey = LookupKey("all")
    """Configuration key associated with matching everything."""

    def __init__(self, config: ConstraintsConfig | str | None, *, universe: DimensionUniverse):
        # Default is to accept all and reject nothing.
        self._accept: set[LookupKey] = set()
        self._reject: set[LookupKey] = set()

        if config is not None:
            self.config = ConstraintsConfig(config)

            if "accept" in self.config:
                self._accept = processLookupConfigList(self.config["accept"], universe=universe)
            if "reject" in self.config:
                self._reject = processLookupConfigList(self.config["reject"], universe=universe)

        # Accepting everything and rejecting everything at the same time is
        # contradictory; refuse the configuration outright.
        if self.matchAllKey in self._accept and self.matchAllKey in self._reject:
            raise ConstraintsValidationError(
                "Can not explicitly accept 'all' and reject 'all' in one configuration"
            )

    def __str__(self) -> str:
        # Standard stringification.
        if not self._accept and not self._reject:
            return "Accepts: all"

        accepts = ", ".join(str(k) for k in self._accept)
        rejects = ", ".join(str(k) for k in self._reject)
        return f"Accepts: {accepts}; Rejects: {rejects}"

    def isAcceptable(self, entity: DatasetRef | DatasetType | StorageClass) -> bool:
        """Check whether the supplied entity will be acceptable.

        Parameters
        ----------
        entity : `DatasetType`, `DatasetRef`, or `StorageClass`
            Instance to use to look in constraints table.
            The entity itself reports the `LookupKey` that is relevant.

        Returns
        -------
        allowed : `bool`
            `True` if the entity is allowed.
        """
        # Get the names to use for lookup.
        names = set(entity._lookupNames())

        # Explicit accept takes precedence over everything else.
        isExplicitlyAccepted = bool(names & self._accept)

        if isExplicitlyAccepted:
            return True

        # Explicit reject takes precedence over any wildcard handling.
        isExplicitlyRejected = bool(names & self._reject)

        if isExplicitlyRejected:
            return False

        # Now look for wildcard match -- we have to also check for dataId
        # overrides.

        # Generate a new set of lookup keys that use the wildcard name
        # but the supplied dimensions.
        wildcards = {k.clone(name=self.matchAllKey.name) for k in names}

        isWildcardAccepted = bool(wildcards & self._accept)
        isWildcardRejected = bool(wildcards & self._reject)

        if isWildcardRejected:
            return False

        # If all the wildcard and explicit rejections have failed then
        # if the accept list is empty, or if a wildcard acceptance worked,
        # we can accept, else reject.
        if isWildcardAccepted or not self._accept:
            return True

        return False

    def getLookupKeys(self) -> set[LookupKey]:
        """Retrieve the look up keys for all the constraints entries.

        Returns
        -------
        keys : `set` of `LookupKey`
            The keys available for determining constraints. Does not include
            the special "all" lookup key.
        """
        # Union of BOTH accept and reject keys.  (Previously this unioned the
        # accept set with itself, silently dropping all reject-side keys.)
        # Avoid naming the local "all" to not shadow the builtin.
        allKeys = self._accept | self._reject
        return {k for k in allKeys if k.name != self.matchAllKey.name}