Coverage for python/lsst/daf/butler/dimensions/_coordinate.py: 46%
455 statements
« prev ^ index » next coverage.py v7.5.1, created at 2024-05-11 03:16 -0700
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28#
29# Design notes for this module are in
30# doc/lsst.daf.butler/dev/dataCoordinate.py.
31#
33from __future__ import annotations
# Public names exported by this module.
__all__ = (
    "DataCoordinate",
    "DataId",
    "DataIdKey",
    "DataIdValue",
    "SerializedDataCoordinate",
    "SerializedDataId",
)
44import numbers
45import warnings
46from abc import abstractmethod
47from collections.abc import Iterable, Iterator, Mapping, Set
48from typing import TYPE_CHECKING, Any, ClassVar, cast
50import pydantic
51from deprecated.sphinx import deprecated
52from lsst.sphgeom import IntersectionRegion, Region
53from lsst.utils.introspection import find_outside_stacklevel
55from .._exceptions import DimensionNameError
56from .._named import NamedKeyMapping, NamedValueAbstractSet, NameLookupMapping
57from .._timespan import Timespan
58from ..json import from_json_pydantic, to_json_pydantic
59from ..persistence_context import PersistenceContextVars
60from ._elements import Dimension, DimensionElement
61from ._graph import DimensionGraph
62from ._group import DimensionGroup
63from ._records import DimensionRecord, SerializedDimensionRecord
65if TYPE_CHECKING: # Imports needed only for type annotations; may be circular.
66 from ..registry import Registry
67 from ._universe import DimensionUniverse
DataIdKey = str | Dimension
"""Type annotation alias for the keys that can be used to index a
DataCoordinate.
"""

# Pydantic will cast int to str if str is first in the Union.
# NOTE(review): `None` is allowed here presumably to represent nullable
# dimension values in serialized data IDs — confirm against callers.
DataIdValue = int | str | None
"""Type annotation alias for the values that can be present in a
DataCoordinate or other data ID.
"""

SerializedDataId = dict[str, DataIdValue]
"""Simplified model for serializing the ``mapping`` property of
`DataCoordinate`.
"""
class SerializedDataCoordinate(pydantic.BaseModel):
    """Simplified model for serializing a `DataCoordinate`."""

    # The data ID key-value pairs themselves.
    dataId: SerializedDataId
    # Optional serialized dimension records, keyed by dimension element name.
    records: dict[str, SerializedDimensionRecord] | None = None

    @classmethod
    def direct(cls, *, dataId: SerializedDataId, records: dict[str, dict] | None) -> SerializedDataCoordinate:
        """Construct a `SerializedDataCoordinate` directly without validators.

        Parameters
        ----------
        dataId : `SerializedDataId`
            The data ID.
        records : `dict` or `None`
            The dimension records.

        Returns
        -------
        coordinate : `SerializedDataCoordinate`
            The newly-constructed (or cached) model.

        Notes
        -----
        This differs from the pydantic "construct" method in that the arguments
        are explicitly what the model requires, and it will recurse through
        members, constructing them from their corresponding `direct` methods.

        This method should only be called when the inputs are trusted.
        """
        # Cache key includes whether records are attached, so a record-free
        # and a record-bearing version of the same data ID never collide.
        key = (frozenset(dataId.items()), records is not None)
        cache = PersistenceContextVars.serializedDataCoordinateMapping.get()
        if cache is not None and (result := cache.get(key)) is not None:
            return result

        if records is None:
            serialized_records = None
        else:
            # Recurse into nested records via their own trusted constructors.
            serialized_records = {k: SerializedDimensionRecord.direct(**v) for k, v in records.items()}

        # model_construct skips pydantic validation; inputs are trusted by
        # this method's contract.
        node = cls.model_construct(dataId=dataId, records=serialized_records)

        if cache is not None:
            cache[key] = node
        return node
def _intersectRegions(*args: Region) -> Region | None:
    """Return the intersection of several regions.

    For internal use by `ExpandedDataCoordinate` only.

    If no regions are provided, returns `None`; a single region is returned
    unchanged, and multiple regions are folded left-to-right into nested
    `IntersectionRegion` objects.
    """
    if not args:
        return None
    combined = args[0]
    for region in args[1:]:
        combined = IntersectionRegion(combined, region)
    return combined
class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
    """A validated data ID.

    DataCoordinate guarantees that its key-value pairs identify at least all
    required dimensions in a `DimensionGroup`.

    Notes
    -----
    `DataCoordinate` is an ABC, but it provides `staticmethod` factory
    functions for private concrete implementations that should be sufficient
    for most purposes. `standardize` is the most flexible and safe of these;
    the others (`make_empty`, `from_required_values`, and `from_full_values`)
    are more specialized and perform little or no checking of inputs.

    Lookups for implied dimensions (those in ``self.dimensions.implied``) are
    supported if and only if `has_full_values` is `True`. This also sets the
    keys of the `mapping` attribute. This means that `DataCoordinate` equality
    is not the same as testing for equality on the `mapping` attribute
    (instead, it is the same as testing for equality on the `required`
    attribute).

    See also :ref:`lsst.daf.butler-dimensions_data_ids`
    """

    # The ABC holds no instance state; concrete subclasses declare their own
    # slots.
    __slots__ = ()

    # Pydantic model used by the to_simple/from_simple serialization round
    # trip.
    _serializedType: ClassVar[type[pydantic.BaseModel]] = SerializedDataCoordinate
    @staticmethod
    def standardize(
        mapping: NameLookupMapping[Dimension, DataIdValue] | None = None,
        *,
        dimensions: Iterable[str] | DimensionGroup | DimensionGraph | None = None,
        graph: DimensionGraph | None = None,
        universe: DimensionUniverse | None = None,
        defaults: DataCoordinate | None = None,
        **kwargs: Any,
    ) -> DataCoordinate:
        """Standardize the supplied dataId.

        Adapts an arbitrary mapping and/or additional arguments into a true
        `DataCoordinate`, or augment an existing one.

        Parameters
        ----------
        mapping : `~collections.abc.Mapping`, optional
            An informal data ID that maps dimensions or dimension names to
            their primary key values (may also be a true `DataCoordinate`).
        dimensions : `~collections.abc.Iterable` [ `str` ], `DimensionGroup` \
                or `DimensionGraph`, optional
            The dimensions to be identified by the new `DataCoordinate`. If not
            provided, will be inferred from the keys of ``mapping`` and
            ``**kwargs``, and ``universe`` must be provided unless ``mapping``
            is already a `DataCoordinate`.
        graph : `DimensionGraph`, optional
            Like ``dimensions``, but requires a ``DimensionGraph`` instance.
            Ignored if ``dimensions`` is provided. Deprecated and will be
            removed after v27.
        universe : `DimensionUniverse`
            All known dimensions and their relationships; used to expand and
            validate dependencies when ``graph`` is not provided.
        defaults : `DataCoordinate`, optional
            Default dimension key-value pairs to use when needed. These are
            never used to infer ``graph``, and are ignored if a different value
            is provided for the same key in ``mapping`` or ``**kwargs``.
        **kwargs
            Additional keyword arguments are treated like additional key-value
            pairs in ``mapping``.

        Returns
        -------
        coordinate : `DataCoordinate`
            A validated `DataCoordinate` instance.

        Raises
        ------
        TypeError
            Raised if the set of optional arguments provided is not supported.
        DimensionNameError
            Raised if a key-value pair for a required dimension is missing.
        """
        # Resolve the universe from whichever argument can supply one, in
        # priority order: explicit, dimensions, graph, mapping.
        universe = (
            universe
            or getattr(dimensions, "universe", None)
            or getattr(graph, "universe", None)
            or getattr(mapping, "universe", None)
        )
        if universe is None:
            raise TypeError(
                "universe must be provided, either directly or via dimensions, mapping, or graph."
            )
        if graph is not None:
            # TODO: remove argument on DM-41326.
            warnings.warn(
                "The 'graph' argument to DataCoordinate.standardize is deprecated in favor of the "
                "'dimensions' argument, and will be removed after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            dimensions = graph.names
        if dimensions is not None:
            dimensions = universe.conform(dimensions)
        del graph  # make sure we don't actually use this below
        new_mapping: dict[str, DataIdValue] = {}
        if isinstance(mapping, DataCoordinate):
            if dimensions is None:
                if not kwargs:
                    # Already standardized to exactly what we want.
                    return mapping
            elif kwargs.keys().isdisjoint(dimensions.names):
                # User provided kwargs, but told us not to use them by
                # passing in dimensions that are disjoint from those kwargs.
                # This is not necessarily user error - it's a useful pattern
                # to pass in all of the key-value pairs you have and let the
                # code here pull out only what it needs.
                return mapping.subset(dimensions.names)
            new_mapping.update((name, mapping[name]) for name in mapping.dimensions.required)
            if mapping.hasFull():
                new_mapping.update((name, mapping[name]) for name in mapping.dimensions.implied)
        elif isinstance(mapping, NamedKeyMapping):
            warnings.warn(
                "Passing a NamedKeyMapping to DataCoordinate.standardize is deprecated, and will be "
                "removed after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            new_mapping.update(mapping.byName())
        elif mapping is not None:
            new_mapping.update(mapping)
        # kwargs win over values from ``mapping`` for the same key.
        new_mapping.update(kwargs)
        if dimensions is None:
            if defaults is not None:
                universe = defaults.universe
            elif universe is None:
                # NOTE(review): ``universe`` is guaranteed non-None by the
                # check at the top of this method, so this branch is
                # unreachable dead code — consider removing.
                raise TypeError("universe must be provided if graph is not.")
            dimensions = DimensionGroup(universe, new_mapping.keys())
        if not dimensions:
            return DataCoordinate.make_empty(universe)
        # Some backends cannot handle numpy.int64 type which is a subclass of
        # numbers.Integral; convert that to int.
        for k, v in new_mapping.items():
            if isinstance(v, numbers.Integral):
                new_mapping[k] = int(v)  # type: ignore
        if defaults is not None:
            # Defaults never override values already present.
            for k, v in defaults.mapping.items():
                new_mapping.setdefault(k, v)
        if new_mapping.keys() >= dimensions.names:
            # We have values for all dimensions (required and implied).
            return DataCoordinate.from_full_values(
                dimensions, tuple(new_mapping[name] for name in dimensions.data_coordinate_keys)
            )
        else:
            try:
                values = tuple(new_mapping[name] for name in dimensions.required)
            except KeyError as err:
                raise DimensionNameError(
                    f"No value in data ID ({mapping}) for required dimension {err}."
                ) from err
            return DataCoordinate.from_required_values(dimensions, values)
    @property
    @abstractmethod
    def mapping(self) -> Mapping[str, DataIdValue]:
        """A mapping view of the data ID with keys for all dimensions it has
        values for.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
    @property
    @abstractmethod
    def required(self) -> Mapping[str, DataIdValue]:
        """A mapping view of the data ID with keys for just its required
        dimensions.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
    @property
    @abstractmethod
    def required_values(self) -> tuple[DataIdValue, ...]:
        """The required values (only) of this data ID as a tuple.

        Element order is consistent with `required`.

        In contexts where all data IDs have the same dimensions, comparing and
        hashing these tuples can be much faster than comparing the original
        `DataCoordinate` instances.
        """
        # Implemented by concrete subclasses; also backs __eq__, __hash__,
        # and __lt__.
        raise NotImplementedError()
    @property
    def full_values(self) -> tuple[DataIdValue, ...]:
        """The full values (only) of this data ID as a tuple.

        Element order is consistent with `DimensionGroup.data_coordinate_keys`,
        i.e. all required dimensions followed by all implied dimensions.
        """
        # Default implementation signals a required-values-only data ID;
        # presumably overridden by implementations that hold full values —
        # those subclasses are defined elsewhere in this file.
        raise ValueError(f"DataCoordinate {self} has only required values.")
    @staticmethod
    def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate`.

        It identifies the null set of dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies no dimensions. `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        # camelCase alias; simply delegates to the snake_case factory.
        return DataCoordinate.make_empty(universe)
    @staticmethod
    def make_empty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate`.

        It identifies the null set of dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies no dimensions. `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        # An expanded implementation with no values and no records satisfies
        # both hasFull() and hasRecords() trivially.
        return _ExpandedTupleDataCoordinate(universe.empty.as_group(), (), {})
    # TODO: remove on DM-41326.
    @staticmethod
    @deprecated(
        "fromRequiredValues is deprecated in favor of from_required_values, "
        "which takes a DimensionGroup instead of a DimensionGraph. It will be "
        "removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def fromRequiredValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from required dimension values.

        This method is deprecated in favor of `from_required_values`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to ``graph.required``,
            in that order.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` only if ``graph.implied``
            is empty. ``dataId.hasRecords()`` will return `True`
            if and only if ``graph`` is empty.
        """
        # Delegate to the DimensionGroup-based replacement.
        return DataCoordinate.from_required_values(graph._group, values)
    @staticmethod
    def from_required_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from required dimension values.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        dimensions : `DimensionGroup`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``dimensions.required``, in that order.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` only if
            ``dimensions.implied`` is empty. ``dataId.hasRecords()`` will
            return `True` if and only if ``dimensions`` is empty.
        """
        assert len(dimensions.required) == len(
            values
        ), f"Inconsistency between dimensions {dimensions.required} and required values {values}."
        if not dimensions:
            return DataCoordinate.make_empty(dimensions.universe)
        if not dimensions.implied:
            # With no implied dimensions, the required values are also the
            # full values.
            return _FullTupleDataCoordinate(dimensions, values)
        return _RequiredTupleDataCoordinate(dimensions, values)
    # TODO: remove on DM-41326.
    @staticmethod
    @deprecated(
        "fromFullValues is deprecated in favor of from_full_values, "
        "which takes a DimensionGroup instead of a DimensionGraph. It will be "
        "removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def fromFullValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from all dimension values.

        This method is deprecated in favor of `from_full_values`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(graph.required, graph.implied)``, in that order.
            Note that this is _not_ the same order as ``graph.dimensions``,
            though these contain the same elements.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`.
            ``dataId.hasRecords()`` will only return `True` if ``graph`` is
            empty.
        """
        # Delegate to the DimensionGroup-based replacement.
        return DataCoordinate.from_full_values(graph._group, values)
    @staticmethod
    def from_full_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from all dimension values.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        dimensions : `DimensionGroup`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``dimensions.data_coordinate_keys``, i.e. all required dimensions
            followed by all implied dimensions.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`.
            ``dataId.hasRecords()`` will only return `True` if ``dimensions``
            is empty.
        """
        assert len(dimensions) == len(
            values
        ), f"Inconsistency between dimensions {dimensions.data_coordinate_keys} and full values {values}."
        if not dimensions:
            return DataCoordinate.make_empty(dimensions.universe)
        return _FullTupleDataCoordinate(dimensions, values)
    def __bool__(self) -> bool:
        # A data ID is falsy if and only if it identifies no dimensions.
        return bool(self.dimensions)
    def __hash__(self) -> int:
        # Hash on dimensions + required values only, consistent with __eq__.
        return hash((self.dimensions,) + self.required_values)
    def __eq__(self, other: Any) -> bool:
        # Coerce informal data IDs (plain mappings) before comparing; this
        # may raise if `other` cannot be standardized in this universe.
        if not isinstance(other, DataCoordinate):
            other = DataCoordinate.standardize(other, universe=self.universe)
        # Equality considers required values only, matching __hash__.
        return self.dimensions == other.dimensions and self.required_values == other.required_values
    def __repr__(self) -> str:
        # We can't make repr yield something that could be exec'd here without
        # printing out the whole DimensionUniverse.
        return str(self.mapping)
    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, DataCoordinate):
            return NotImplemented
        # Unlike repr() we only use required keys here to ensure that __eq__
        # can not be true simultaneously with __lt__ being true.
        # Note: the dimensions themselves are not compared, so ordering
        # across different dimension groups is by raw value tuples only.
        return self.required_values < other.required_values
    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def __iter__(self) -> Iterator[Dimension]:
        # Deprecated Mapping interface: iterates over required Dimension
        # objects (via the deprecated keys()).
        return iter(self.keys())
    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def __len__(self) -> int:
        # Deprecated Mapping interface: counts required dimensions only.
        return len(self.keys())
    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def keys(self) -> NamedValueAbstractSet[Dimension]:  # type: ignore
        # Accessing .graph here is itself deprecated; both go away together
        # on DM-41326.
        return self.graph.required
    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.names is deprecated in favor of the .dimensions "
        "attribute, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def names(self) -> Set[str]:
        """Names of the required dimensions identified by this data ID.

        They are returned in the same order as `keys`
        (`collections.abc.Set` [ `str` ]).
        """
        # Delegates to the (also deprecated) keys() view.
        return self.keys().names
    @abstractmethod
    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        """Return a `DataCoordinate` whose graph is a subset of ``self.graph``.

        Parameters
        ----------
        dimensions : `DimensionGraph`, `DimensionGroup`, or \
                `~collections.abc.Iterable` [ `str` ]
            The dimensions identified by the returned `DataCoordinate`.
            Passing a `DimensionGraph` is deprecated and support will be
            dropped after v27.

        Returns
        -------
        coordinate : `DataCoordinate`
            A `DataCoordinate` instance that identifies only the given
            dimensions. May be ``self`` if ``graph == self.graph``.

        Raises
        ------
        KeyError
            Raised if the primary key value for one or more required dimensions
            is unknown. This may happen even if the required subset of the new
            dimensions is a subset of the dimensions actually known by
            this data ID. As an example, consider trying to go from a data ID
            with dimensions {instrument, physical_filter, band} to just
            {instrument, band}; band is implied by physical_filter and hence
            would have no value in the original data ID if ``self.hasFull()``
            is `False`.

        Notes
        -----
        If `hasFull` and `hasRecords` return `True` on ``self``, they will
        return `True` (respectively) on the returned `DataCoordinate` as well.
        The converse does not hold.
        """
        # TODO: update docs r.e. deprecation on DM-41326.
        raise NotImplementedError()
    @abstractmethod
    def union(self, other: DataCoordinate) -> DataCoordinate:
        """Combine two data IDs.

        Yields a new one that identifies all dimensions that either of them
        identify.

        Parameters
        ----------
        other : `DataCoordinate`
            Data ID to combine with ``self``.

        Returns
        -------
        unioned : `DataCoordinate`
            A `DataCoordinate` instance that satisfies
            ``unioned.dimensions == self.dimensions.union(other.dimensions)``.
            Will preserve ``hasFull`` and ``hasRecords`` whenever possible.

        Notes
        -----
        No checking for consistency is performed on values for keys that
        ``self`` and ``other`` have in common, and which value is included in
        the returned data ID is not specified.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
    @abstractmethod
    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        """Return a `DataCoordinate` that holds the given records.

        Guarantees that `hasRecords` returns `True`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `Registry.expandDataId` instead.

        Parameters
        ----------
        records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or \
                `None` ]
            A `NamedKeyMapping` with `DimensionElement` keys or a regular
            `~collections.abc.Mapping` with `str` (`DimensionElement` name)
            keys and `DimensionRecord` values. Keys must cover all elements in
            ``self.graph.elements``. Values may be `None`, but only to reflect
            actual NULL values in the database, not just records that have not
            been fetched. Passing a `NamedKeyMapping` is deprecated and will
            not be supported after v27.
        """
        # TODO: update docs r.e. deprecation on DM-41326.
        raise NotImplementedError()
    @property
    def universe(self) -> DimensionUniverse:
        """Universe that defines all known compatible dimensions.

        The universe will be compatible with this coordinate
        (`DimensionUniverse`).
        """
        # The universe always comes from the dimension group.
        return self.dimensions.universe
    @property
    @abstractmethod
    def dimensions(self) -> DimensionGroup:
        """Dimensions identified by this data ID (`DimensionGroup`).

        Note that values are only required to be present for dimensions in
        ``self.dimensions.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.graph is deprecated in favor of .dimensions, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def graph(self) -> DimensionGraph:
        """Dimensions identified by this data ID (`DimensionGraph`).

        Note that values are only required to be present for dimensions in
        ``self.graph.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        # Adapt the modern DimensionGroup back to the legacy graph type.
        return self.dimensions._as_graph()
    @abstractmethod
    def hasFull(self) -> bool:
        """Whether this data ID contains implied and required values.

        Returns
        -------
        state : `bool`
            If `True`, `__getitem__`, `get`, and `__contains__` (but not
            `keys`!) will act as though the mapping includes key-value pairs
            for implied dimensions, and the `full` property may be used. If
            `False`, these operations only include key-value pairs for required
            dimensions, and accessing `full` is an error. Always `True` if
            there are no implied dimensions.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.full is deprecated in favor of .mapping, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    @abstractmethod
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        """Return mapping for all dimensions in ``self.dimensions``.

        The mapping includes key-value pairs for all dimensions in
        ``self.dimensions``, including implied.

        Accessing this attribute if `hasFull` returns `False` is a logic error
        that may raise an exception of unspecified type either immediately or
        when implied keys are accessed via the returned mapping, depending on
        the implementation and whether assertions are enabled.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
    # TODO: remove on DM-41326.
    @deprecated(
        "DataCoordinate.values_tuple() is deprecated in favor of .required_values, and will be dropped "
        "after v27.",
        version="v27",
        category=FutureWarning,
    )
    def values_tuple(self) -> tuple[DataIdValue, ...]:
        """Return the required values (only) of this data ID as a tuple.

        In contexts where all data IDs have the same dimensions, comparing and
        hashing these tuples can be *much* faster than comparing the original
        `DataCoordinate` instances.
        """
        # Thin deprecated wrapper over the replacement property.
        return self.required_values
    @abstractmethod
    def hasRecords(self) -> bool:
        """Whether this data ID contains records.

        These are the records for all of the dimension elements it identifies.

        Returns
        -------
        state : `bool`
            If `True`, the following attributes may be accessed:

            - `records`
            - `region`
            - `timespan`
            - `pack`

            If `False`, accessing any of these is considered a logic error.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
    @property
    def records(self) -> NamedKeyMapping[DimensionElement, DimensionRecord | None]:
        """A mapping that contains `DimensionRecord` objects for all
        elements identified by this data ID.

        This mapping will become a regular `~collections.abc.Mapping` with
        `str` keys after v27.

        Notes
        -----
        The values of this mapping may be `None` if and only if there is no
        record for that element with these dimensions in the database (which
        means some foreign key field must have a NULL value).

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may raise an exception of unspecified type either
        immediately or when the returned mapping is used, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "records may only be accessed if hasRecords() returns True."
        # Return a lazy view backed by _record(); defined elsewhere in this
        # file.
        return _DataCoordinateRecordsView(self)
    @abstractmethod
    def _record(self, name: str) -> DimensionRecord | None:
        """Protected implementation hook that backs the ``records`` attribute.

        Parameters
        ----------
        name : `str`
            The name of a `DimensionElement`, guaranteed to be in
            ``self.dimensions.elements``.

        Returns
        -------
        record : `DimensionRecord` or `None`
            The dimension record for the given element identified by this
            data ID, or `None` if there is no such record.
        """
        # Implemented by concrete subclasses.
        raise NotImplementedError()
827 @property
828 def region(self) -> Region | None:
829 """Spatial region associated with this data ID.
831 (`lsst.sphgeom.Region` or `None`).
833 This is `None` if and only if ``self.dimensions.spatial`` is empty.
835 Accessing this attribute if `hasRecords` returns `False` is a logic
836 error that may or may not raise an exception, depending on the
837 implementation and whether assertions are enabled.
838 """
839 assert self.hasRecords(), "region may only be accessed if hasRecords() returns True."
840 regions = []
841 for family in self.dimensions.spatial:
842 element = family.choose(self.dimensions.elements, self.universe)
843 record = self._record(element.name)
844 if record is None or record.region is None:
845 return None
846 else:
847 regions.append(record.region)
848 return _intersectRegions(*regions)
850 @property
851 def timespan(self) -> Timespan | None:
852 """Temporal interval associated with this data ID.
854 (`Timespan` or `None`).
856 This is `None` if and only if ``self.dimensions.temporal`` is empty.
858 Accessing this attribute if `hasRecords` returns `False` is a logic
859 error that may or may not raise an exception, depending on the
860 implementation and whether assertions are enabled.
861 """
862 assert self.hasRecords(), "timespan may only be accessed if hasRecords() returns True."
863 timespans = []
864 for family in self.dimensions.temporal:
865 element = family.choose(self.dimensions.elements, self.universe)
866 record = self._record(element.name)
867 # DimensionRecord subclasses for temporal elements always have
868 # .timespan, but they're dynamic so this can't be type-checked.
869 if record is None or record.timespan is None:
870 return None
871 else:
872 timespans.append(record.timespan)
873 if not timespans:
874 return None
875 elif len(timespans) == 1:
876 return timespans[0]
877 else:
878 return Timespan.intersection(*timespans)
    def to_simple(self, minimal: bool = False) -> SerializedDataCoordinate:
        """Convert this class to a simple python type.

        This is suitable for serialization.

        Parameters
        ----------
        minimal : `bool`, optional
            Use minimal serialization. If set the records will not be attached.

        Returns
        -------
        simple : `SerializedDataCoordinate`
            The object converted to simple form.
        """
        # Convert to a dict form
        records: dict[str, SerializedDimensionRecord] | None
        if not minimal and self.hasRecords():
            # Serialize each element's record, skipping NULL (None) records.
            records = {
                k: v.to_simple() for k in self.dimensions.elements if (v := self.records[k]) is not None
            }
        else:
            records = None

        return SerializedDataCoordinate(dataId=dict(self.mapping), records=records)
906 @classmethod
907 def from_simple(
908 cls,
909 simple: SerializedDataCoordinate,
910 universe: DimensionUniverse | None = None,
911 registry: Registry | None = None,
912 ) -> DataCoordinate:
913 """Construct a new object from the simplified form.
915 The data is assumed to be of the form returned from the `to_simple`
916 method.
918 Parameters
919 ----------
920 simple : `dict` of [`str`, `Any`]
921 The `dict` returned by `to_simple()`.
922 universe : `DimensionUniverse`
923 Object that manages all known dimensions.
924 registry : `lsst.daf.butler.Registry`, optional
925 Registry from which a universe can be extracted. Can be `None`
926 if universe is provided explicitly.
928 Returns
929 -------
930 dataId : `DataCoordinate`
931 Newly-constructed object.
932 """
933 key = (frozenset(simple.dataId.items()), simple.records is not None)
934 cache = PersistenceContextVars.dataCoordinates.get()
935 if cache is not None and (result := cache.get(key)) is not None:
936 return result
937 if universe is None and registry is None:
938 raise ValueError("One of universe or registry is required to convert a dict to a DataCoordinate")
939 if universe is None and registry is not None:
940 universe = registry.dimensions
941 if universe is None:
942 # this is for mypy
943 raise ValueError("Unable to determine a usable universe")
945 dataId = cls.standardize(simple.dataId, universe=universe)
946 if simple.records:
947 dataId = dataId.expanded(
948 {k: DimensionRecord.from_simple(v, universe=universe) for k, v in simple.records.items()}
949 )
950 if cache is not None:
951 cache[key] = dataId
952 return dataId
    # JSON serialization entry points, implemented by the shared
    # pydantic-based helper functions.
    to_json = to_json_pydantic
    from_json: ClassVar = classmethod(from_json_pydantic)
DataId = DataCoordinate | Mapping[str, Any]
"""A type-annotation alias for signatures that accept both informal data ID
dictionaries and validated `DataCoordinate` instances.
"""
# Deprecated by having its only public access (DataCoordinate.full) deprecated.
# TODO: remove on DM-41326.
class _DataCoordinateFullView(NamedKeyMapping[Dimension, DataIdValue]):
    """View class for `DataCoordinate.full`.

    Provides the default implementation for `DataCoordinate.full`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: _BasicTupleDataCoordinate):
        self._target = target

    def __repr__(self) -> str:
        # Delegate so the view and the underlying data ID print alike.
        return repr(self._target)

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Value lookup (including any key deprecation warnings) is handled
        # entirely by the target data ID.
        return self._target[key]

    def __iter__(self) -> Iterator[Dimension]:
        yield from self.keys()

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueAbstractSet[Dimension]:  # type: ignore
        # Deprecation warning will come from using .graph.
        return self._target.graph.dimensions

    @property
    def names(self) -> Set[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names
# TODO: Make a Mapping[str, DimensionRecord | None] on DM-41326.
class _DataCoordinateRecordsView(NamedKeyMapping[DimensionElement, DimensionRecord | None]):
    """View class for `DataCoordinate.records`.

    Provides the default implementation for `DataCoordinate.records`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __repr__(self) -> str:
        body = ", ".join(
            f"{d}: {self[d]!r}" for d in self._target.graph.elements.names
        )
        return f"{{{body}}}"

    def __str__(self) -> str:
        return "\n".join(str(record) for record in self.values())

    def __getitem__(self, key: DimensionElement | str) -> DimensionRecord | None:
        # TODO: drop DimensionElement keys on DM-41326.
        if isinstance(key, DimensionElement):
            warnings.warn(
                "Using Dimension keys in DataCoordinate is deprecated and will not be supported after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            key = key.name
        return self._target._record(key)

    # TODO: fix on DM-41326.
    @deprecated(
        "Iteration over DataCoordinate.records is deprecated as the key type will change to 'str' after "
        "v27. Use DataCoordinate.dimensions.elements to get the names of all dimension elements instead.",
        version="v27",
        category=FutureWarning,
    )
    def __iter__(self) -> Iterator[DimensionElement]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    # TODO: remove on DM-41326.
    # Deprecation warning will come from using .graph.
    def keys(self) -> NamedValueAbstractSet[DimensionElement]:  # type: ignore
        return self._target.graph.elements

    @property
    @deprecated(
        "DataCoordinate.records.names is deprecated in favor of DataCoordinate.dimensions.elements and "
        "will be removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def names(self) -> Set[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names
class _BasicTupleDataCoordinate(DataCoordinate):
    """Intermediate base class for the standard implementation of
    `DataCoordinate`.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via the static
    methods there.

    Parameters
    ----------
    dimensions : `DimensionGroup`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match
        ``dimensions.data_coordinate_keys``. May include values for just
        required dimensions (which always come first) or all dimensions
        (concrete subclasses implementations will care which).
    """

    __slots__ = ("_dimensions", "_values")

    def __init__(self, dimensions: DimensionGroup, values: tuple[DataIdValue, ...]):
        self._dimensions = dimensions
        self._values = values

    @property
    def dimensions(self) -> DimensionGroup:
        # Docstring inherited from DataCoordinate.
        return self._dimensions

    @property
    def required(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateRequiredMappingView(self)

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Docstring inherited from DataCoordinate.
        # TODO: remove on DM-41326.
        if isinstance(key, Dimension):
            warnings.warn(
                "Using Dimension keys in DataCoordinate is deprecated and will not be supported after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            key = key.name
        position = self._dimensions._data_coordinate_indices[key]
        if position < len(self._values):
            return self._values[position]
        # Caller asked for an implied dimension, but this object only has
        # values for the required ones.
        raise KeyError(key)

    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a NamedKeyMapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27. "
        "Use `dict(data_id.required)` as an exact replacement for `data_id.byName()`.",
        version="v27",
        category=FutureWarning,
    )
    def byName(self) -> dict[str, DataIdValue]:
        # Docstring inheritance.
        # Zipping names with `required_values` avoids the much slower
        # per-key `__getitem__` path taken by values().
        return dict(zip(self.names, self.required_values, strict=True))

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def _record(self, name: str) -> DimensionRecord | None:
        # Docstring inherited from DataCoordinate.
        # Subclasses without records must never reach this.
        raise AssertionError()

    def __getattr__(self, name: str) -> Any:
        # Give a more helpful error when a dimension-element attribute is
        # accessed on a data ID that has not been expanded with records.
        if name in self.dimensions.elements:
            raise AttributeError(
                f"Dimension record attribute {name!r} is only available on expanded DataCoordinates."
            )
        raise AttributeError(name)
class _DataCoordinateRequiredMappingView(Mapping[str, DataIdValue]):
    """A DataCoordinate Mapping view class whose keys are just the required
    dimensions.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: str) -> DataIdValue:
        # Only required-dimension keys are visible through this view.
        if key in self._target.dimensions.required:
            return self._target[key]
        raise KeyError(key)

    def __len__(self) -> int:
        return len(self._target.dimensions.required)

    def __iter__(self) -> Iterator[str]:
        return iter(self._target.dimensions.required)

    def __repr__(self) -> str:
        body = ", ".join(f"{k}: {v!r}" for k, v in self.items())
        return f"{{{body}}}"
class _DataCoordinateFullMappingView(Mapping[str, DataIdValue]):
    """A DataCoordinate Mapping view class whose keys are all dimensions."""

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: str) -> DataIdValue:
        # Delegate lookups directly to the target data ID.
        return self._target[key]

    def __len__(self) -> int:
        return len(self._target.dimensions)

    def __iter__(self) -> Iterator[str]:
        return iter(self._target.dimensions.data_coordinate_keys)

    def __repr__(self) -> str:
        body = ", ".join(f"{k}: {v!r}" for k, v in self.items())
        return f"{{{body}}}"
class _RequiredTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that has values for required
    dimensions only, when implied dimensions already exist.

    Note that `_FullTupleDataCoordinate` should be used if there are no
    implied dimensions.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.from_required_values`.
    """

    __slots__ = ()

    @property
    def mapping(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateRequiredMappingView(self)

    @property
    def required_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        # This class stores exactly the required values, in order.
        return self._values

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.universe.conform(dimensions)
        if self._dimensions == dimensions:
            return self
        elif self._dimensions.required >= dimensions.names:
            # Our required values cover every dimension of the subset, so we
            # can promote the result to a full data ID.
            return DataCoordinate.from_full_values(
                dimensions,
                tuple(self[k] for k in dimensions.data_coordinate_keys),
            )
        else:
            return DataCoordinate.from_required_values(
                dimensions, tuple(self[k] for k in dimensions.required)
            )

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.dimensions.union(other.dimensions)
        # See if the other one is already what we want to return. We don't
        # shortcut-return 'self' because `other` might have full values or
        # even records, and we want to return the more complete data ID.
        if other.dimensions == dimensions:
            return other
        # General case with actual merging of dictionaries.
        values = dict(self.mapping)
        values.update(other.mapping)
        return DataCoordinate.standardize(values, dimensions=dimensions)

    # TODO: remove on DM-41326.
    @property
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        # Docstring inherited.
        raise AssertionError("full may only be accessed if hasFull() returns True.")

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        # Warn before doing any work, for consistency with
        # _FullTupleDataCoordinate.expanded.
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        # Extract a complete values tuple from the attributes of the given
        # records. It's possible for these to be inconsistent with
        # self._values (which is a serious problem, of course), but we've
        # documented this as a no-checking API.
        values = self._values + tuple(
            getattr(records[d], cast(Dimension, self.universe[d]).primaryKey.name)
            for d in self._dimensions.implied
        )
        return _ExpandedTupleDataCoordinate(self._dimensions, values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def __reduce__(self) -> tuple[Any, ...]:
        return (_RequiredTupleDataCoordinate, (self._dimensions, self._values))
class _FullTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that has values for all dimensions.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.from_full_values`.
    """

    __slots__ = ()

    @property
    def mapping(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateFullMappingView(self)

    @property
    def required_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        # Required values always occupy the leading slots of the tuple.
        n_required = len(self._dimensions.required)
        return self._values[:n_required]

    @property
    def full_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        return self._values

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.universe.conform(dimensions)
        if dimensions == self._dimensions:
            return self
        # We hold values for every dimension, so any subset is full too.
        subset_values = tuple(self[name] for name in dimensions.data_coordinate_keys)
        return DataCoordinate.from_full_values(dimensions, subset_values)

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        merged_dims = self.dimensions.union(other.dimensions)
        # See if one or both input data IDs is already what we want to
        # return; if so, return the most complete one we have.
        if other.dimensions == merged_dims and other.hasRecords():
            return other
        if self.dimensions == merged_dims and not other.hasRecords():
            return self
        # General case with actual merging of dictionaries.
        merged_values = dict(self.mapping)
        merged_values.update(other.mapping)
        return DataCoordinate.standardize(merged_values, dimensions=merged_dims)

    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.full is deprecated in favor of .mapping, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        # Docstring inherited.
        return _DataCoordinateFullView(self)

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        return _ExpandedTupleDataCoordinate(self._dimensions, self._values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def __reduce__(self) -> tuple[Any, ...]:
        return (_FullTupleDataCoordinate, (self._dimensions, self._values))
class _ExpandedTupleDataCoordinate(_FullTupleDataCoordinate):
    """A `DataCoordinate` implementation that directly holds `DimensionRecord`
    objects relevant to it.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.expanded`.

    Parameters
    ----------
    dimensions : `DimensionGroup`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match
        ``dimensions._data_coordinate_indices``. Just include values for all
        dimensions.
    records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or `None` ]
        A `NamedKeyMapping` with `DimensionElement` keys or a regular
        `~collections.abc.Mapping` with `str` (`DimensionElement` name) keys
        and `DimensionRecord` values. Keys must cover all elements in
        ``self.dimensions.elements``. Values may be `None`, but only to
        reflect actual NULL values in the database, not just records that have
        not been fetched.
    """

    __slots__ = ("_records",)

    def __init__(
        self,
        dimensions: DimensionGroup,
        values: tuple[DataIdValue, ...],
        records: NameLookupMapping[DimensionElement, DimensionRecord | None],
    ):
        super().__init__(dimensions, values)
        assert super().hasFull(), "This implementation requires full dimension records."
        self._records = records

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        # Re-expand with our records so the subset stays expanded.
        return super().subset(dimensions).expanded(self._records)

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        # Already expanded; the new records are ignored.
        return self

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        result = super().union(other)
        if not result.hasRecords() and other.hasRecords():
            # Try to re-expand the merged data ID using records from both
            # inputs; `other` wins on any shared elements.
            combined = {name: self._record(name) for name in self.dimensions.elements}
            combined.update((name, other._record(name)) for name in other.dimensions.elements)
            if combined.keys() >= result.dimensions.elements:
                return result.expanded(combined)
        return result

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def _record(self, name: str) -> DimensionRecord | None:
        # Docstring inherited from DataCoordinate.
        return self._records[name]

    def __reduce__(self) -> tuple[Any, ...]:
        return (_ExpandedTupleDataCoordinate, (self._dimensions, self._values, self._records))

    def __getattr__(self, name: str) -> Any:
        # Expose dimension records as attributes, e.g. ``data_id.visit``.
        try:
            return self._record(name)
        except KeyError:
            raise AttributeError(name) from None

    def __dir__(self) -> list[str]:
        # Include record attribute names for tab-completion.
        return [*super().__dir__(), *self.dimensions.elements]