Coverage for python/lsst/pipe/base/_task_metadata.py: 14%
208 statements
coverage.py v7.2.7, created at 2023-07-12 11:14 -0700
# This file is part of pipe_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

__all__ = ["TaskMetadata"]

import itertools
import numbers
import warnings
from collections.abc import Collection, Iterator, Mapping, Sequence
from typing import Any, Protocol

try:
    from pydantic.v1 import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr
except ModuleNotFoundError:
    from pydantic import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr  # type: ignore


_DEPRECATION_REASON = "Will be removed after v25."
_DEPRECATION_VERSION = "v24"

# The types allowed in a Task metadata field are restricted
# to allow predictable serialization.
_ALLOWED_PRIMITIVE_TYPES = (str, float, int, bool)


class PropertySetLike(Protocol):
    """Protocol that looks like a ``lsst.daf.base.PropertySet``.

    Enough of the API is specified to support conversion of a
    ``PropertySet`` to a `TaskMetadata`.
    """

    def paramNames(self, topLevelOnly: bool = True) -> Collection[str]:
        ...

    def getArray(self, name: str) -> Any:
        ...


def _isListLike(v: Any) -> bool:
    return isinstance(v, Sequence) and not isinstance(v, str)


class TaskMetadata(BaseModel):
    """Dict-like object for storing task metadata.

    Metadata can be stored at two levels: single task or task plus subtasks.
    The latter is called the full metadata of a task and has the form

        topLevelTaskName:subtaskName:subsubtaskName.itemName

    The metadata item key of a task (`itemName` above) must not contain `.`,
    which serves as a separator in full metadata keys and turns
    the value into a sub-dictionary. Arbitrary hierarchies are supported.
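
    Examples
    --------
    A minimal illustrative sketch of hierarchical access; the task and item
    names below are arbitrary placeholders, not part of any real pipeline:

    >>> meta = TaskMetadata()
    >>> meta["topLevelTask.subTask.nStars"] = 5
    >>> meta["topLevelTask.subTask.nStars"]
    5
    >>> isinstance(meta["topLevelTask"], TaskMetadata)
    True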
    """

    scalars: dict[str, StrictFloat | StrictInt | StrictBool | StrictStr] = Field(default_factory=dict)
    arrays: dict[str, list[StrictFloat] | list[StrictInt] | list[StrictBool] | list[StrictStr]] = Field(
        default_factory=dict
    )
    metadata: dict[str, "TaskMetadata"] = Field(default_factory=dict)

    @classmethod
    def from_dict(cls, d: Mapping[str, Any]) -> "TaskMetadata":
        """Create a TaskMetadata from a dictionary.

        Parameters
        ----------
        d : `~collections.abc.Mapping`
            Mapping to convert. Can be hierarchical. Any dictionaries
            in the hierarchy are converted to `TaskMetadata`.

        Returns
        -------
        meta : `TaskMetadata`
            Newly-constructed metadata.
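
        Examples
        --------
        Illustrative only; the keys below are arbitrary. Nested dictionaries
        become nested `TaskMetadata`, and list values land in the ``arrays``
        slot, so scalar-style lookup returns the final element:

        >>> meta = TaskMetadata.from_dict({"a": 1, "sub": {"b": [1, 2]}})
        >>> meta["a"]
        1
        >>> meta["sub.b"]
        2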
        """
        metadata = cls()
        for k, v in d.items():
            metadata[k] = v
        return metadata

    @classmethod
    def from_metadata(cls, ps: PropertySetLike) -> "TaskMetadata":
        """Create a TaskMetadata from a PropertySet-like object.

        Parameters
        ----------
        ps : `PropertySetLike` or `TaskMetadata`
            A ``PropertySet``-like object to be transformed to a
            `TaskMetadata`. A `TaskMetadata` can be copied using this
            class method.

        Returns
        -------
        tm : `TaskMetadata`
            Newly-constructed metadata.

        Notes
        -----
        Items stored in single-element arrays in the supplied object
        will be converted to scalars in the newly-created object.
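
        Examples
        --------
        A `TaskMetadata` itself satisfies the protocol, so this sketch
        (with an arbitrary key) copies one and shows the single-element-array
        conversion noted above:

        >>> original = TaskMetadata.from_dict({"a": [5]})
        >>> copy = TaskMetadata.from_metadata(original)
        >>> copy["a"]
        5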
        """
        # Use hierarchical names to assign values from input to output.
        # This API exists for both PropertySet and TaskMetadata.
        # from_dict() does not work because PropertySet is not declared
        # to be a Mapping.
        # PropertySet.toDict() is not present in TaskMetadata so is best
        # avoided.
        metadata = cls()
        for key in sorted(ps.paramNames(topLevelOnly=False)):
            value = ps.getArray(key)
            if len(value) == 1:
                value = value[0]
            metadata[key] = value
        return metadata

    def to_dict(self) -> dict[str, Any]:
        """Convert the class to a simple dictionary.

        Returns
        -------
        d : `dict`
            Simple dictionary that can contain scalar values, array values
            or other dictionary values.

        Notes
        -----
        Unlike `dict()`, this method hides the model layout and combines
        scalars, arrays, and other metadata in the same dictionary. Can be
        used when a simple dictionary is needed. Use
        `TaskMetadata.from_dict()` to convert it back.
        """
        d: dict[str, Any] = {}
        d.update(self.scalars)
        d.update(self.arrays)
        for k, v in self.metadata.items():
            d[k] = v.to_dict()
        return d

    def add(self, name: str, value: Any) -> None:
        """Store a new value, adding to a list if one already exists.

        Parameters
        ----------
        name : `str`
            Name of the metadata property.
        value
            Metadata property value.
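
        Examples
        --------
        A minimal sketch with an arbitrary key name; repeated calls
        accumulate values into a list:

        >>> meta = TaskMetadata()
        >>> meta.add("n", 1)
        >>> meta.add("n", 2)
        >>> meta.getArray("n")
        [1, 2]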
        """
        keys = self._getKeys(name)
        key0 = keys.pop(0)
        if len(keys) == 0:
            # If add() is being used, always store the value in the arrays
            # property as a list. It's likely there will be another call.
            slot_type, value = self._validate_value(value)
            if slot_type == "array":
                pass
            elif slot_type == "scalar":
                value = [value]
            else:
                raise ValueError("add() can only be used for primitive types or sequences of those types.")

            if key0 in self.metadata:
                raise ValueError(f"Can not add() to key '{name}' since that is a TaskMetadata")

            if key0 in self.scalars:
                # Convert scalar to array.
                # MyPy should be able to figure out that List[Union[T1, T2]] is
                # compatible with Union[List[T1], List[T2]] if the list has
                # only one element, but it can't.
                self.arrays[key0] = [self.scalars.pop(key0)]  # type: ignore

            if key0 in self.arrays:
                # Check that the type is not changing.
                if (curtype := type(self.arrays[key0][0])) is not (newtype := type(value[0])):
                    raise ValueError(f"Type mismatch in add() -- currently {curtype} but adding {newtype}")
                self.arrays[key0].extend(value)
            else:
                self.arrays[key0] = value

            return

        self.metadata[key0].add(".".join(keys), value)

    def getScalar(self, key: str) -> str | int | float | bool:
        """Retrieve a scalar item even if the item is a list.

        Parameters
        ----------
        key : `str`
            Item to retrieve.

        Returns
        -------
        value : `str`, `int`, `float`, or `bool`
            Either the value associated with the key or, if the key
            corresponds to a list, the last item in the list.

        Raises
        ------
        KeyError
            Raised if the item is not found.
        """
        # Used in pipe_tasks.
        # getScalar() is the default behavior for __getitem__.
        return self[key]

    def getArray(self, key: str) -> list[Any]:
        """Retrieve an item as a list even if it is a scalar.

        Parameters
        ----------
        key : `str`
            Item to retrieve.

        Returns
        -------
        values : `list` of any
            A list containing the value or values associated with this item.

        Raises
        ------
        KeyError
            Raised if the item is not found.
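
        Examples
        --------
        A brief sketch with an arbitrary key name; a value stored as a
        scalar is returned wrapped in a one-element list:

        >>> meta = TaskMetadata()
        >>> meta["x"] = 3.5
        >>> meta.getArray("x")
        [3.5]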
        """
        keys = self._getKeys(key)
        key0 = keys.pop(0)
        if len(keys) == 0:
            if key0 in self.arrays:
                return self.arrays[key0]
            elif key0 in self.scalars:
                return [self.scalars[key0]]
            elif key0 in self.metadata:
                return [self.metadata[key0]]
            raise KeyError(f"'{key}' not found")

        try:
            return self.metadata[key0].getArray(".".join(keys))
        except KeyError:
            # Report the correct key.
            raise KeyError(f"'{key}' not found") from None

    def names(self, topLevelOnly: bool = True) -> set[str]:
        """Return the hierarchical keys from the metadata.

        Parameters
        ----------
        topLevelOnly : `bool`
            If `True`, return only the top-level keys; otherwise return the
            full metadata item keys.

        Returns
        -------
        names : `collections.abc.Set`
            A set of top-level keys or full metadata item keys, including
            the top-level keys.

        Notes
        -----
        Should never be called in new code with ``topLevelOnly`` set to `True`
        -- this is equivalent to asking for the keys and is the default
        when iterating through the task metadata. In this case a deprecation
        message is issued, and this usage will raise an exception in a
        future release.

        When ``topLevelOnly`` is `False` all keys, including those from the
        hierarchy and the top-level hierarchy, are returned.
        """
        if topLevelOnly:
            warnings.warn("Use keys() instead. " + _DEPRECATION_REASON, FutureWarning)
            return set(self.keys())
        else:
            names = set()
            for k, v in self.items():
                names.add(k)  # Always include the current level
                if isinstance(v, TaskMetadata):
                    names.update({k + "." + item for item in v.names(topLevelOnly=topLevelOnly)})
            return names

    def paramNames(self, topLevelOnly: bool) -> set[str]:
        """Return hierarchical names.

        Parameters
        ----------
        topLevelOnly : `bool`
            Control whether only top-level items are returned or items
            from the hierarchy.

        Returns
        -------
        paramNames : `set` of `str`
            If ``topLevelOnly`` is `True`, returns any keys that are not
            part of a hierarchy. If `False` also returns fully-qualified
            names from the hierarchy. Keys associated with the top
            of a hierarchy are never returned.
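
        Examples
        --------
        An illustrative sketch with arbitrary keys; note that ``sub`` itself
        is never returned, only its fully-qualified children:

        >>> meta = TaskMetadata.from_dict({"a": 1, "sub": {"b": 2}})
        >>> sorted(meta.paramNames(topLevelOnly=False))
        ['a', 'sub.b']
        >>> sorted(meta.paramNames(topLevelOnly=True))
        ['a']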
        """
        # Currently used by the verify package.
        paramNames = set()
        for k, v in self.items():
            if isinstance(v, TaskMetadata):
                if not topLevelOnly:
                    paramNames.update({k + "." + item for item in v.paramNames(topLevelOnly=topLevelOnly)})
            else:
                paramNames.add(k)
        return paramNames

    @staticmethod
    def _getKeys(key: str) -> list[str]:
        """Return the key hierarchy.

        Parameters
        ----------
        key : `str`
            The key to analyze. Can be dot-separated.

        Returns
        -------
        keys : `list` of `str`
            The key hierarchy that has been split on ``.``.

        Raises
        ------
        KeyError
            Raised if the key is not a string.
        """
        try:
            keys = key.split(".")
        except Exception:
            raise KeyError(f"Invalid key '{key}': only string keys are allowed") from None
        return keys

    def keys(self) -> tuple[str, ...]:
        """Return the top-level keys."""
        return tuple(k for k in self)

    def items(self) -> Iterator[tuple[str, Any]]:
        """Yield the top-level keys and values."""
        for k, v in itertools.chain(self.scalars.items(), self.arrays.items(), self.metadata.items()):
            yield (k, v)

    def __len__(self) -> int:
        """Return the number of items."""
        return len(self.scalars) + len(self.arrays) + len(self.metadata)

    # This is actually a Liskov substitution violation, because
    # pydantic.BaseModel says __iter__ should return something else. But the
    # pydantic docs say to do exactly this in order to make a mapping-like
    # BaseModel, so that's what we do.
    def __iter__(self) -> Iterator[str]:  # type: ignore
        """Return an iterator over each key."""
        # The order of keys is not preserved since items can move
        # from scalar to array.
        return itertools.chain(iter(self.scalars), iter(self.arrays), iter(self.metadata))

    def __getitem__(self, key: str) -> Any:
        """Retrieve the item associated with the key.

        Parameters
        ----------
        key : `str`
            The key to retrieve. Can be dot-separated hierarchical.

        Returns
        -------
        value : `TaskMetadata`, `float`, `int`, `bool`, `str`
            A scalar value. For compatibility with ``PropertySet``, if the key
            refers to an array, the final element is returned and not the
            array itself.

        Raises
        ------
        KeyError
            Raised if the item is not found.
        """
        keys = self._getKeys(key)
        key0 = keys.pop(0)
        if len(keys) == 0:
            if key0 in self.scalars:
                return self.scalars[key0]
            if key0 in self.metadata:
                return self.metadata[key0]
            if key0 in self.arrays:
                return self.arrays[key0][-1]
            raise KeyError(f"'{key}' not found")
        # Hierarchical lookup so the top key can only be in the metadata
        # property. Trap KeyError and reraise so that the correct key
        # in the hierarchy is reported.
        try:
            # And forward request to that metadata.
            return self.metadata[key0][".".join(keys)]
        except KeyError:
            raise KeyError(f"'{key}' not found") from None

    def get(self, key: str, default: Any = None) -> Any:
        """Retrieve the item associated with the key or a default.

        Parameters
        ----------
        key : `str`
            The key to retrieve. Can be dot-separated hierarchical.
        default
            The value to return if the key does not exist.

        Returns
        -------
        value : `TaskMetadata`, `float`, `int`, `bool`, `str`
            A scalar value. If the key refers to an array, the final element
            is returned and not the array itself; this is consistent with
            `__getitem__` and `PropertySet.get`, but not ``to_dict().get``.
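
        Examples
        --------
        A short sketch (keys are arbitrary) showing the final-element
        behaviour and the default fallback:

        >>> meta = TaskMetadata.from_dict({"a": [1, 2]})
        >>> meta.get("a")
        2
        >>> meta.get("missing", -1)
        -1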
        """
        try:
            return self[key]
        except KeyError:
            return default

    def __setitem__(self, key: str, item: Any) -> None:
        """Store the given item."""
        keys = self._getKeys(key)
        key0 = keys.pop(0)
        if len(keys) == 0:
            slots: dict[str, dict[str, Any]] = {
                "array": self.arrays,
                "scalar": self.scalars,
                "metadata": self.metadata,
            }
            primary: dict[str, Any] | None = None
            slot_type, item = self._validate_value(item)
            primary = slots.pop(slot_type, None)
            if primary is None:
                raise AssertionError(f"Unknown slot type returned from validator: {slot_type}")

            # Assign the value to the right place.
            primary[key0] = item
            for property in slots.values():
                # Remove any other entries.
                property.pop(key0, None)
            return

        # This must be hierarchical so forward to the child TaskMetadata.
        if key0 not in self.metadata:
            self.metadata[key0] = TaskMetadata()
        self.metadata[key0][".".join(keys)] = item

        # Ensure we have cleared out anything with the same name elsewhere.
        self.scalars.pop(key0, None)
        self.arrays.pop(key0, None)

    def __contains__(self, key: str) -> bool:
        """Determine if the key exists."""
        keys = self._getKeys(key)
        key0 = keys.pop(0)
        if len(keys) == 0:
            return key0 in self.scalars or key0 in self.arrays or key0 in self.metadata

        if key0 in self.metadata:
            return ".".join(keys) in self.metadata[key0]
        return False

    def __delitem__(self, key: str) -> None:
        """Remove the specified item.

        Raises
        ------
        KeyError
            Raised if the item is not present.
        """
        keys = self._getKeys(key)
        key0 = keys.pop(0)
        if len(keys) == 0:
            # MyPy can't figure out that this way to combine the types in the
            # tuple is the one that matters, and annotating a local variable
            # helps it out.
            properties: tuple[dict[str, Any], ...] = (self.scalars, self.arrays, self.metadata)
            for property in properties:
                if key0 in property:
                    del property[key0]
                    return
            raise KeyError(f"'{key}' not found")

        try:
            del self.metadata[key0][".".join(keys)]
        except KeyError:
            # Report the correct key.
            raise KeyError(f"'{key}' not found") from None

    def _validate_value(self, value: Any) -> tuple[str, Any]:
        """Validate the given value.

        Parameters
        ----------
        value : Any
            Value to check.

        Returns
        -------
        slot_type : `str`
            The type of value given. Options are "scalar", "array", "metadata".
        item : Any
            The item that was given but possibly modified to conform to
            the slot type.

        Raises
        ------
        ValueError
            Raised if the value is not a recognized type.
        """
        # Test the simplest option first.
        value_type = type(value)
        if value_type in _ALLOWED_PRIMITIVE_TYPES:
            return "scalar", value

        if isinstance(value, TaskMetadata):
            return "metadata", value
        if isinstance(value, Mapping):
            return "metadata", self.from_dict(value)

        if _isListLike(value):
            # For model consistency, need to check that every item in the
            # list has the same type.
            value = list(value)

            type0 = type(value[0])
            for i in value:
                if type(i) != type0:
                    raise ValueError(
                        "Type mismatch in supplied list. TaskMetadata requires all"
                        f" elements have same type but see {type(i)} and {type0}."
                    )

            if type0 not in _ALLOWED_PRIMITIVE_TYPES:
                # Must check to see if we got numpy floats or something.
                type_cast: type
                if isinstance(value[0], numbers.Integral):
                    type_cast = int
                elif isinstance(value[0], numbers.Real):
                    type_cast = float
                else:
                    raise ValueError(
                        f"Supplied list has element of type '{type0}'. "
                        "TaskMetadata can only accept primitive types in lists."
                    )

                value = [type_cast(v) for v in value]

            return "array", value

        # Sometimes a numpy number is given.
        if isinstance(value, numbers.Integral):
            value = int(value)
            return "scalar", value
        if isinstance(value, numbers.Real):
            value = float(value)
            return "scalar", value

        raise ValueError(f"TaskMetadata does not support values of type {value!r}.")


# Needed because a TaskMetadata can contain a TaskMetadata.
TaskMetadata.update_forward_refs()