Coverage for python/lsst/daf/butler/dimensions/_coordinate.py: 46%
454 statements
« prev ^ index » next coverage.py v7.4.1, created at 2024-02-01 11:20 +0000
« prev ^ index » next coverage.py v7.4.1, created at 2024-02-01 11:20 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28#
29# Design notes for this module are in
30# doc/lsst.daf.butler/dev/dataCoordinate.py.
31#
33from __future__ import annotations
35__all__ = (
36 "DataCoordinate",
37 "DataId",
38 "DataIdKey",
39 "DataIdValue",
40 "SerializedDataCoordinate",
41 "SerializedDataId",
42)
44import numbers
45import warnings
46from abc import abstractmethod
47from collections.abc import Iterable, Iterator, Mapping, Set
48from typing import TYPE_CHECKING, Any, ClassVar, cast
50import pydantic
51from deprecated.sphinx import deprecated
52from lsst.sphgeom import IntersectionRegion, Region
53from lsst.utils.introspection import find_outside_stacklevel
55from .._named import NamedKeyMapping, NamedValueAbstractSet, NameLookupMapping
56from .._timespan import Timespan
57from ..json import from_json_pydantic, to_json_pydantic
58from ..persistence_context import PersistenceContextVars
59from ._elements import Dimension, DimensionElement
60from ._graph import DimensionGraph
61from ._group import DimensionGroup
62from ._records import DimensionRecord, SerializedDimensionRecord
64if TYPE_CHECKING: # Imports needed only for type annotations; may be circular.
65 from ..registry import Registry
66 from ._universe import DimensionUniverse
DataIdKey = str | Dimension
"""Type annotation alias for the keys that can be used to index a
DataCoordinate.
"""

# Pydantic will cast int to str if str is first in the Union.
DataIdValue = int | str | None
"""Type annotation alias for the values that can be present in a
DataCoordinate or other data ID.
"""

SerializedDataId = dict[str, DataIdValue]
"""Simplified model for serializing the ``mapping`` property of
`DataCoordinate`.
"""
class SerializedDataCoordinate(pydantic.BaseModel):
    """Simplified model for serializing a `DataCoordinate`."""

    dataId: SerializedDataId
    records: dict[str, SerializedDimensionRecord] | None = None

    @classmethod
    def direct(cls, *, dataId: SerializedDataId, records: dict[str, dict] | None) -> SerializedDataCoordinate:
        """Construct a `SerializedDataCoordinate` directly without validators.

        Parameters
        ----------
        dataId : `SerializedDataId`
            The data ID.
        records : `dict` or `None`
            The dimension records.

        Notes
        -----
        This differs from the pydantic "construct" method in that the arguments
        are explicitly what the model requires, and it will recurse through
        members, constructing them from their corresponding `direct` methods.

        This method should only be called when the inputs are trusted.
        """
        # Cache key distinguishes "no records" from "records attached".
        cache_key = (frozenset(dataId.items()), records is not None)
        cache = PersistenceContextVars.serializedDataCoordinateMapping.get()
        if cache is not None:
            cached = cache.get(cache_key)
            if cached is not None:
                return cached

        if records is None:
            record_models = None
        else:
            # Recurse into member records via their own trusted constructor.
            record_models = {
                name: SerializedDimensionRecord.direct(**simple) for name, simple in records.items()
            }

        instance = cls.model_construct(dataId=dataId, records=record_models)

        if cache is not None:
            cache[cache_key] = instance
        return instance
127def _intersectRegions(*args: Region) -> Region | None:
128 """Return the intersection of several regions.
130 For internal use by `ExpandedDataCoordinate` only.
132 If no regions are provided, returns `None`.
133 """
134 if len(args) == 0:
135 return None
136 else:
137 result = args[0]
138 for n in range(1, len(args)):
139 result = IntersectionRegion(result, args[n])
140 return result
class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
    """A validated data ID.

    DataCoordinate guarantees that its key-value pairs identify at least all
    required dimensions in a `DimensionGroup`.

    Notes
    -----
    `DataCoordinate` is an ABC, but it provides `staticmethod` factory
    functions for private concrete implementations that should be sufficient
    for most purposes. `standardize` is the most flexible and safe of these;
    the others (`make_empty`, `from_required_values`, and `from_full_values`)
    are more specialized and perform little or no checking of inputs.

    Lookups for implied dimensions (those in ``self.dimensions.implied``) are
    supported if and only if `has_full_values` is `True`. This also sets the
    keys of the `mapping` attribute. This means that `DataCoordinate` equality
    is not the same as testing for equality on the `mapping` attribute
    (instead, it is the same as testing for equality on the `required`
    attribute).

    See also :ref:`lsst.daf.butler-dimensions_data_ids`
    """

    __slots__ = ()

    # Pydantic model class used by the to_simple/from_simple round trip.
    _serializedType = SerializedDataCoordinate
171 @staticmethod
172 def standardize(
173 mapping: NameLookupMapping[Dimension, DataIdValue] | None = None,
174 *,
175 dimensions: Iterable[str] | DimensionGroup | DimensionGraph | None = None,
176 graph: DimensionGraph | None = None,
177 universe: DimensionUniverse | None = None,
178 defaults: DataCoordinate | None = None,
179 **kwargs: Any,
180 ) -> DataCoordinate:
181 """Standardize the supplied dataId.
183 Adapts an arbitrary mapping and/or additional arguments into a true
184 `DataCoordinate`, or augment an existing one.
186 Parameters
187 ----------
188 mapping : `~collections.abc.Mapping`, optional
189 An informal data ID that maps dimensions or dimension names to
190 their primary key values (may also be a true `DataCoordinate`).
191 dimensions : `~collections.abc.Iterable` [ `str` ], `DimensionGroup` \
192 or `DimensionGraph`, optional
193 The dimensions to be identified by the new `DataCoordinate`. If not
194 provided, will be inferred from the keys of ``mapping`` and
195 ``**kwargs``, and ``universe`` must be provided unless ``mapping``
196 is already a `DataCoordinate`.
197 graph : `DimensionGraph`, optional
198 Like ``dimensions``, but requires a ``DimensionGraph`` instance.
199 Ignored if ``dimensions`` is provided. Deprecated and will be
200 removed after v27.
201 universe : `DimensionUniverse`
202 All known dimensions and their relationships; used to expand and
203 validate dependencies when ``graph`` is not provided.
204 defaults : `DataCoordinate`, optional
205 Default dimension key-value pairs to use when needed. These are
206 never used to infer ``graph``, and are ignored if a different value
207 is provided for the same key in ``mapping`` or `**kwargs``.
208 **kwargs
209 Additional keyword arguments are treated like additional key-value
210 pairs in ``mapping``.
212 Returns
213 -------
214 coordinate : `DataCoordinate`
215 A validated `DataCoordinate` instance.
217 Raises
218 ------
219 TypeError
220 Raised if the set of optional arguments provided is not supported.
221 KeyError
222 Raised if a key-value pair for a required dimension is missing.
223 """
224 universe = (
225 universe
226 or getattr(dimensions, "universe", None)
227 or getattr(graph, "universe", None)
228 or getattr(mapping, "universe", None)
229 )
230 if universe is None:
231 raise TypeError(
232 "universe must be provided, either directly or via dimensions, mapping, or graph."
233 )
234 if graph is not None:
235 # TODO: remove argument on DM-41326.
236 warnings.warn(
237 "The 'graph' argument to DataCoordinate.standardize is deprecated in favor of the "
238 "'dimensions' argument, and will be removed after v27.",
239 category=FutureWarning,
240 stacklevel=find_outside_stacklevel("lsst.daf.butler"),
241 )
242 dimensions = graph.names
243 if dimensions is not None:
244 dimensions = universe.conform(dimensions)
245 del graph # make sure we don't actualy use this below
246 new_mapping: dict[str, DataIdValue] = {}
247 if isinstance(mapping, DataCoordinate):
248 if dimensions is None:
249 if not kwargs:
250 # Already standardized to exactly what we want.
251 return mapping
252 elif kwargs.keys().isdisjoint(dimensions.names):
253 # User provided kwargs, but told us not to use them by
254 # passing in dimensions that are disjoint from those kwargs.
255 # This is not necessarily user error - it's a useful pattern
256 # to pass in all of the key-value pairs you have and let the
257 # code here pull out only what it needs.
258 return mapping.subset(dimensions.names)
259 new_mapping.update((name, mapping[name]) for name in mapping.dimensions.required)
260 if mapping.hasFull():
261 new_mapping.update((name, mapping[name]) for name in mapping.dimensions.implied)
262 elif isinstance(mapping, NamedKeyMapping):
263 warnings.warn(
264 "Passing a NamedKeyMapping to DataCoordinate.standardize is deprecated, and will be "
265 "removed after v27.",
266 category=FutureWarning,
267 stacklevel=find_outside_stacklevel("lsst.daf.butler"),
268 )
269 new_mapping.update(mapping.byName())
270 elif mapping is not None:
271 new_mapping.update(mapping)
272 new_mapping.update(kwargs)
273 if dimensions is None:
274 if defaults is not None:
275 universe = defaults.universe
276 elif universe is None:
277 raise TypeError("universe must be provided if graph is not.")
278 dimensions = DimensionGroup(universe, new_mapping.keys())
279 if not dimensions:
280 return DataCoordinate.make_empty(universe)
281 # Some backends cannot handle numpy.int64 type which is a subclass of
282 # numbers.Integral; convert that to int.
283 for k, v in new_mapping.items():
284 if isinstance(v, numbers.Integral):
285 new_mapping[k] = int(v) # type: ignore
286 if defaults is not None:
287 for k, v in defaults.mapping.items():
288 new_mapping.setdefault(k, v)
289 if new_mapping.keys() >= dimensions.names:
290 return DataCoordinate.from_full_values(
291 dimensions, tuple(new_mapping[name] for name in dimensions.data_coordinate_keys)
292 )
293 else:
294 try:
295 values = tuple(new_mapping[name] for name in dimensions.required)
296 except KeyError as err:
297 raise KeyError(f"No value in data ID ({mapping}) for required dimension {err}.") from err
298 return DataCoordinate.from_required_values(dimensions, values)
    @property
    @abstractmethod
    def mapping(self) -> Mapping[str, DataIdValue]:
        """A mapping view of the data ID with keys for all dimensions it has
        values for.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def required(self) -> Mapping[str, DataIdValue]:
        """A mapping view of the data ID with keys for just its required
        dimensions.
        """
        raise NotImplementedError()
    @property
    @abstractmethod
    def required_values(self) -> tuple[DataIdValue, ...]:
        """The required values (only) of this data ID as a tuple.

        Element order is consistent with `required`.

        In contexts where all data IDs have the same dimensions, comparing and
        hashing these tuples can be much faster than comparing the original
        `DataCoordinate` instances.
        """
        raise NotImplementedError()
    @property
    def full_values(self) -> tuple[DataIdValue, ...]:
        """The full values (only) of this data ID as a tuple.

        Element order is consistent with `DimensionGroup.data_coordinate_keys`,
        i.e. all required dimensions followed by all implied dimensions.
        """
        # Base-class fallback: concrete subclasses that actually carry implied
        # values override this; here only required values exist, so asking for
        # full values is an error.
        raise ValueError(f"DataCoordinate {self} has only required values.")
    @staticmethod
    def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate`.

        It identifies the null set of dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies no dimensions. `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        # NOTE(review): unlike the other camelCase factories this one carries
        # no @deprecated decorator — confirm whether that is intentional.
        return DataCoordinate.make_empty(universe)
    @staticmethod
    def make_empty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate`.

        It identifies the null set of dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies no dimensions. `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        # The expanded implementation with an empty record mapping satisfies
        # both hasFull() and hasRecords() trivially.
        return _ExpandedTupleDataCoordinate(universe.empty.as_group(), (), {})
    # TODO: remove on DM-41326.
    @staticmethod
    @deprecated(
        "fromRequiredValues is deprecated in favor of from_required_values, "
        "which takes a DimensionGroup instead of a DimensionGraph. It will be "
        "removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def fromRequiredValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from required dimension values.

        This method is deprecated in favor of `from_required_values`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to ``graph.required``,
            in that order.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` only if ``graph.implied``
            is empty. ``dataId.hasRecords()`` will return `True`
            if and only if ``graph`` is empty.
        """
        # Delegate to the DimensionGroup-based replacement.
        return DataCoordinate.from_required_values(graph._group, values)
    @staticmethod
    def from_required_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from required dimension values.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        dimensions : `DimensionGroup`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``dimensions.required``, in that order.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` only if
            ``dimensions.implied`` is empty. ``dataId.hasRecords()`` will
            return `True` if and only if ``dimensions`` is empty.
        """
        assert len(dimensions.required) == len(
            values
        ), f"Inconsistency between dimensions {dimensions.required} and required values {values}."
        if not dimensions:
            return DataCoordinate.make_empty(dimensions.universe)
        if not dimensions.implied:
            # No implied dimensions means the required values are also full.
            return _FullTupleDataCoordinate(dimensions, values)
        return _RequiredTupleDataCoordinate(dimensions, values)
    # TODO: remove on DM-41326.
    @staticmethod
    @deprecated(
        "fromFullValues is deprecated in favor of from_full_values, "
        "which takes a DimensionGroup instead of a DimensionGraph. It will be "
        "removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def fromFullValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from all dimension values.

        This method is deprecated in favor of `from_full_values`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(graph.required, graph.implied)``, in that order.
            Note that this is _not_ the same order as ``graph.dimensions``,
            though these contain the same elements.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`.
            ``dataId.hasRecords()`` will only return `True` if ``graph`` is
            empty.
        """
        # Delegate to the DimensionGroup-based replacement.
        return DataCoordinate.from_full_values(graph._group, values)
    @staticmethod
    def from_full_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from all dimension values.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        dimensions : `DimensionGroup`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(dimensions.required, dimensions.implied)``, in
            that order.  Note that this is _not_ the same order as
            ``dimensions.names``, though these contain the same elements.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`.
            ``dataId.hasRecords()`` will only return `True` if ``dimensions``
            is empty.
        """
        assert len(dimensions) == len(
            values
        ), f"Inconsistency between dimensions {dimensions.data_coordinate_keys} and full values {values}."
        if not dimensions:
            return DataCoordinate.make_empty(dimensions.universe)
        return _FullTupleDataCoordinate(dimensions, values)
    def __bool__(self) -> bool:
        # A data ID is falsy iff it identifies no dimensions at all.
        return bool(self.dimensions)
    def __hash__(self) -> int:
        # Hash on dimensions plus required values only, mirroring __eq__.
        return hash((self.dimensions,) + self.required_values)
    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, DataCoordinate):
            # Coerce informal data IDs (e.g. plain dicts) before comparing.
            other = DataCoordinate.standardize(other, universe=self.universe)
        # Equality considers required values only; implied values never
        # participate (see class Notes).
        return self.dimensions == other.dimensions and self.required_values == other.required_values
    def __repr__(self) -> str:
        # We can't make repr yield something that could be exec'd here without
        # printing out the whole DimensionUniverse.
        return str(self.mapping)
530 def __lt__(self, other: Any) -> bool:
531 if not isinstance(other, DataCoordinate):
532 return NotImplemented
533 # Unlike repr() we only use required keys here to ensure that __eq__
534 # can not be true simultaneously with __lt__ being true.
535 return self.required_values < other.required_values
    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def __iter__(self) -> Iterator[Dimension]:
        # Deprecated Mapping facade: iterates required Dimension objects.
        return iter(self.keys())
    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def __len__(self) -> int:
        # Deprecated Mapping facade: counts required dimensions only.
        return len(self.keys())
    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def keys(self) -> NamedValueAbstractSet[Dimension]:  # type: ignore
        # Note: delegates to .graph, which is itself deprecated; both go away
        # together after v27.
        return self.graph.required
    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.names is deprecated in favor of the .dimensions "
        "attribute, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def names(self) -> Set[str]:
        """Names of the required dimensions identified by this data ID.

        They are returned in the same order as `keys`
        (`collections.abc.Set` [ `str` ]).
        """
        return self.keys().names
    @abstractmethod
    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        """Return a `DataCoordinate` whose graph is a subset of ``self.graph``.

        Parameters
        ----------
        dimensions : `DimensionGraph`, `DimensionGroup`, or \
                `~collections.abc.Iterable` [ `str` ]
            The dimensions identified by the returned `DataCoordinate`.
            Passing a `DimensionGraph` is deprecated and support will be
            dropped after v27.

        Returns
        -------
        coordinate : `DataCoordinate`
            A `DataCoordinate` instance that identifies only the given
            dimensions. May be ``self`` if ``graph == self.graph``.

        Raises
        ------
        KeyError
            Raised if the primary key value for one or more required dimensions
            is unknown. This may happen even if the required subset of the new
            dimensions are not a subset of the dimensions actually known by
            this data ID. As an example, consider trying to go from a data ID
            with dimensions {instrument, physical_filter, band} to just
            {instrument, band}; band is implied by physical_filter and hence
            would have no value in the original data ID if ``self.hasFull()``
            is `False`.

        Notes
        -----
        If `hasFull` and `hasRecords` return `True` on ``self``, they will
        return `True` (respectively) on the returned `DataCoordinate` as well.
        The converse does not hold.
        """
        # TODO: update docs r.e. deprecation on DM-41326.
        raise NotImplementedError()
    @abstractmethod
    def union(self, other: DataCoordinate) -> DataCoordinate:
        """Combine two data IDs.

        Yields a new one that identifies all dimensions that either of them
        identify.

        Parameters
        ----------
        other : `DataCoordinate`
            Data ID to combine with ``self``.

        Returns
        -------
        unioned : `DataCoordinate`
            A `DataCoordinate` instance that satisfies
            ``unioned.dimensions == self.dimensions.union(other.dimensions)``.
            Will preserve ``hasFull`` and ``hasRecords`` whenever possible.

        Notes
        -----
        No checking for consistency is performed on values for keys that
        ``self`` and ``other`` have in common, and which value is included in
        the returned data ID is not specified.
        """
        raise NotImplementedError()
    @abstractmethod
    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        """Return a `DataCoordinate` that holds the given records.

        Guarantees that `hasRecords` returns `True`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `Registry.expandDataId` instead.

        Parameters
        ----------
        records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or \
                `None` ]
            A `NamedKeyMapping` with `DimensionElement` keys or a regular
            `~collections.abc.Mapping` with `str` (`DimensionElement` name)
            keys and `DimensionRecord` values. Keys must cover all elements in
            ``self.graph.elements``. Values may be `None`, but only to reflect
            actual NULL values in the database, not just records that have not
            been fetched. Passing a `NamedKeyMapping` is deprecated and will
            not be supported after v27.
        """
        # TODO: update docs r.e. deprecation on DM-41326.
        raise NotImplementedError()
    @property
    def universe(self) -> DimensionUniverse:
        """Universe that defines all known compatible dimensions.

        The universe will be compatible with this coordinate
        (`DimensionUniverse`).
        """
        return self.dimensions.universe
    @property
    @abstractmethod
    def dimensions(self) -> DimensionGroup:
        """Dimensions identified by this data ID (`DimensionGroup`).

        Note that values are only required to be present for dimensions in
        ``self.dimensions.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        raise NotImplementedError()
    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.graph is deprecated in favor of .dimensions, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def graph(self) -> DimensionGraph:
        """Dimensions identified by this data ID (`DimensionGraph`).

        Note that values are only required to be present for dimensions in
        ``self.graph.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        return self.dimensions._as_graph()
    @abstractmethod
    def hasFull(self) -> bool:
        """Whether this data ID contains implied and required values.

        Returns
        -------
        state : `bool`
            If `True`, `__getitem__`, `get`, and `__contains__` (but not
            `keys`!) will act as though the mapping includes key-value pairs
            for implied dimensions, and the `full` property may be used. If
            `False`, these operations only include key-value pairs for required
            dimensions, and accessing `full` is an error. Always `True` if
            there are no implied dimensions.
        """
        raise NotImplementedError()
    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.full is deprecated in favor of .mapping, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    @abstractmethod
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        """Return mapping for all dimensions in ``self.dimensions``.

        The mapping includes key-value pairs for all dimensions in
        ``self.dimensions``, including implied.

        Accessing this attribute if `hasFull` returns `False` is a logic error
        that may raise an exception of unspecified type either immediately or
        when implied keys are accessed via the returned mapping, depending on
        the implementation and whether assertions are enabled.
        """
        raise NotImplementedError()
    # TODO: remove on DM-41326.
    @deprecated(
        "DataCoordinate.values_tuple() is deprecated in favor of .required_values, and will be dropped "
        "after v27.",
        version="v27",
        category=FutureWarning,
    )
    def values_tuple(self) -> tuple[DataIdValue, ...]:
        """Return the required values (only) of this data ID as a tuple.

        In contexts where all data IDs have the same dimensions, comparing and
        hashing these tuples can be *much* faster than comparing the original
        `DataCoordinate` instances.
        """
        return self.required_values
    @abstractmethod
    def hasRecords(self) -> bool:
        """Whether this data ID contains records.

        These are the records for all of the dimension elements it identifies.

        Returns
        -------
        state : `bool`
            If `True`, the following attributes may be accessed:

            - `records`
            - `region`
            - `timespan`
            - `pack`

            If `False`, accessing any of these is considered a logic error.
        """
        raise NotImplementedError()
    @property
    def records(self) -> NamedKeyMapping[DimensionElement, DimensionRecord | None]:
        """A mapping that contains `DimensionRecord` objects for all
        elements identified by this data ID.

        This mapping will become a regular `~collections.abc.Mapping` with
        `str` keys after v27.

        Notes
        -----
        The values of this mapping may be `None` if and only if there is no
        record for that element with these dimensions in the database (which
        means some foreign key field must have a NULL value).

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may raise an exception of unspecified type either
        immediately or when the returned mapping is used, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "records may only be accessed if hasRecords() returns True."
        # Lazy view object; actual lookups go through self._record().
        return _DataCoordinateRecordsView(self)
    @abstractmethod
    def _record(self, name: str) -> DimensionRecord | None:
        """Protected implementation hook that backs the ``records`` attribute.

        Parameters
        ----------
        name : `str`
            The name of a `DimensionElement`, guaranteed to be in
            ``self.dimensions.elements``.

        Returns
        -------
        record : `DimensionRecord` or `None`
            The dimension record for the given element identified by this
            data ID, or `None` if there is no such record.
        """
        raise NotImplementedError()
    @property
    def region(self) -> Region | None:
        """Spatial region associated with this data ID.

        (`lsst.sphgeom.Region` or `None`).

        This is `None` if and only if ``self.dimensions.spatial`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "region may only be accessed if hasRecords() returns True."
        regions = []
        for family in self.dimensions.spatial:
            # One representative element per spatial family contributes a
            # region; a missing record or region short-circuits to None.
            element = family.choose(self.dimensions.elements, self.universe)
            record = self._record(element.name)
            if record is None or record.region is None:
                return None
            else:
                regions.append(record.region)
        # Empty ``regions`` (no spatial families) also yields None here.
        return _intersectRegions(*regions)
    @property
    def timespan(self) -> Timespan | None:
        """Temporal interval associated with this data ID.

        (`Timespan` or `None`).

        This is `None` if and only if ``self.dimensions.temporal`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "timespan may only be accessed if hasRecords() returns True."
        timespans = []
        for family in self.dimensions.temporal:
            element = family.choose(self.dimensions.elements, self.universe)
            record = self._record(element.name)
            # DimensionRecord subclasses for temporal elements always have
            # .timespan, but they're dynamic so this can't be type-checked.
            if record is None or record.timespan is None:
                return None
            else:
                timespans.append(record.timespan)
        if not timespans:
            return None
        elif len(timespans) == 1:
            # Avoid an unnecessary intersection call for the common case.
            return timespans[0]
        else:
            return Timespan.intersection(*timespans)
877 def to_simple(self, minimal: bool = False) -> SerializedDataCoordinate:
878 """Convert this class to a simple python type.
880 This is suitable for serialization.
882 Parameters
883 ----------
884 minimal : `bool`, optional
885 Use minimal serialization. If set the records will not be attached.
887 Returns
888 -------
889 simple : `SerializedDataCoordinate`
890 The object converted to simple form.
891 """
892 # Convert to a dict form
893 records: dict[str, SerializedDimensionRecord] | None
894 if not minimal and self.hasRecords():
895 records = {
896 k: v.to_simple() for k in self.dimensions.elements if (v := self.records[k]) is not None
897 }
898 else:
899 records = None
901 return SerializedDataCoordinate(dataId=dict(self.mapping), records=records)
903 @classmethod
904 def from_simple(
905 cls,
906 simple: SerializedDataCoordinate,
907 universe: DimensionUniverse | None = None,
908 registry: Registry | None = None,
909 ) -> DataCoordinate:
910 """Construct a new object from the simplified form.
912 The data is assumed to be of the form returned from the `to_simple`
913 method.
915 Parameters
916 ----------
917 simple : `dict` of [`str`, `Any`]
918 The `dict` returned by `to_simple()`.
919 universe : `DimensionUniverse`
920 Object that manages all known dimensions.
921 registry : `lsst.daf.butler.Registry`, optional
922 Registry from which a universe can be extracted. Can be `None`
923 if universe is provided explicitly.
925 Returns
926 -------
927 dataId : `DataCoordinate`
928 Newly-constructed object.
929 """
930 key = (frozenset(simple.dataId.items()), simple.records is not None)
931 cache = PersistenceContextVars.dataCoordinates.get()
932 if cache is not None and (result := cache.get(key)) is not None:
933 return result
934 if universe is None and registry is None:
935 raise ValueError("One of universe or registry is required to convert a dict to a DataCoordinate")
936 if universe is None and registry is not None:
937 universe = registry.dimensions
938 if universe is None:
939 # this is for mypy
940 raise ValueError("Unable to determine a usable universe")
942 dataId = cls.standardize(simple.dataId, universe=universe)
943 if simple.records:
944 dataId = dataId.expanded(
945 {k: DimensionRecord.from_simple(v, universe=universe) for k, v in simple.records.items()}
946 )
947 if cache is not None:
948 cache[key] = dataId
949 return dataId
    # JSON (de)serialization entry points, delegating to the shared
    # pydantic-based helpers; `from_json` is exposed as a classmethod
    # constructor to mirror `from_simple`.
    to_json = to_json_pydantic
    from_json: ClassVar = classmethod(from_json_pydantic)
# Union accepted anywhere a data ID is expected: either a validated
# `DataCoordinate` or a plain `str`-keyed mapping.
DataId = DataCoordinate | Mapping[str, Any]
"""A type-annotation alias for signatures that accept both informal data ID
dictionaries and validated `DataCoordinate` instances.
"""
# Deprecated by having its only public access (DataCoordinate.full) deprecated.
# TODO: remove on DM-41326.
class _DataCoordinateFullView(NamedKeyMapping[Dimension, DataIdValue]):
    """View class for `DataCoordinate.full`.

    Provides the default implementation for `DataCoordinate.full`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: _BasicTupleDataCoordinate):
        self._target = target

    def __repr__(self) -> str:
        return repr(self._target)

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Lookups are delegated straight to the underlying data ID.
        return self._target[key]

    def __iter__(self) -> Iterator[Dimension]:
        yield from self.keys()

    def __len__(self) -> int:
        return len(self._target.graph.dimensions)

    def keys(self) -> NamedValueAbstractSet[Dimension]:  # type: ignore
        return self._target.graph.dimensions

    @property
    def names(self) -> Set[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names
# TODO: Make a Mapping[str, DimensionRecord | None] on DM-41326.
class _DataCoordinateRecordsView(NamedKeyMapping[DimensionElement, DimensionRecord | None]):
    """View class for `DataCoordinate.records`.

    Provides the default implementation for
    `DataCoordinate.records`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    def __init__(self, target: DataCoordinate):
        self._target = target

    __slots__ = ("_target",)

    def __repr__(self) -> str:
        terms = [f"{d}: {self[d]!r}" for d in self._target.graph.elements.names]
        return "{{{}}}".format(", ".join(terms))

    def __str__(self) -> str:
        # One record per line.
        return "\n".join(str(v) for v in self.values())

    def __getitem__(self, key: DimensionElement | str) -> DimensionRecord | None:
        # `str` keys are the long-term interface; `DimensionElement` keys are
        # still accepted for backwards compatibility, but warn.
        if isinstance(key, DimensionElement):
            warnings.warn(
                "Using Dimension keys in DataCoordinate is deprecated and will not be supported after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            key = key.name
        return self._target._record(key)

    # TODO: fix on DM-41326.
    @deprecated(
        "Iteration over DataCoordinate.records is deprecated as the key type will change to 'str' after "
        "v27. Use DataCoordinate.dimensions.elements to get the names of all dimension elements instead.",
        version="v27",
        category=FutureWarning,
    )
    def __iter__(self) -> Iterator[DimensionElement]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    # TODO: remove on DM-41326.
    # Deprecation warning will come from using .graph.
    def keys(self) -> NamedValueAbstractSet[DimensionElement]:  # type: ignore
        return self._target.graph.elements

    @property
    @deprecated(
        "DataCoordinate.records.names is deprecated in favor of DataCoordinate.dimensions.elements and "
        "will be removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def names(self) -> Set[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names
class _BasicTupleDataCoordinate(DataCoordinate):
    """Intermediate base class for the standard implementation of
    `DataCoordinate`.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via the static
    methods there.

    Parameters
    ----------
    dimensions : `DimensionGroup`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match
        ``dimensions.data_coordinate_keys``.  May include values for just
        required dimensions (which always come first) or all dimensions
        (concrete subclasses implementations will care which).
    """

    def __init__(self, dimensions: DimensionGroup, values: tuple[DataIdValue, ...]):
        self._dimensions = dimensions
        self._values = values

    __slots__ = ("_dimensions", "_values")

    @property
    def dimensions(self) -> DimensionGroup:
        # Docstring inherited from DataCoordinate.
        return self._dimensions

    @property
    def required(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateRequiredMappingView(self)

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Docstring inherited from DataCoordinate.
        # TODO: remove on DM-41326.
        if isinstance(key, Dimension):
            warnings.warn(
                "Using Dimension keys in DataCoordinate is deprecated and will not be supported after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            key = key.name
        # Required values come first in the tuple; implied values (if
        # present) follow.
        index = self._dimensions._data_coordinate_indices[key]
        try:
            return self._values[index]
        except IndexError:
            # Caller asked for an implied dimension, but this object only has
            # values for the required ones.
            raise KeyError(key) from None

    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a NamedKeyMapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27. "
        "Use `dict(data_id.required)` as an exact replacement for `data_id.byName()`.",
        version="v27",
        category=FutureWarning,
    )
    def byName(self) -> dict[str, DataIdValue]:
        # Docstring inheritance.
        # Reimplementation is for optimization; `required_values` is much
        # faster to iterate over than values() because it doesn't go through
        # `__getitem__`.
        return dict(zip(self.names, self.required_values, strict=True))

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def _record(self, name: str) -> DimensionRecord | None:
        # Docstring inherited from DataCoordinate.
        # Only expanded data IDs carry records; reaching this base
        # implementation is a logic error.
        raise AssertionError()

    def __getattr__(self, name: str) -> Any:
        # Give a better error message for attempts to read record attributes
        # (e.g. ``data_id.exposure``) on a non-expanded data ID.
        if name in self.dimensions.elements:
            raise AttributeError(
                f"Dimension record attribute {name!r} is only available on expanded DataCoordinates."
            )
        raise AttributeError(name)
class _DataCoordinateRequiredMappingView(Mapping[str, DataIdValue]):
    """A DataCoordinate Mapping view class whose keys are just the required
    dimensions.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: str) -> DataIdValue:
        # Restrict lookups to required dimensions, even when the target also
        # has values for implied ones.
        if key in self._target.dimensions.required:
            return self._target[key]
        raise KeyError(key)

    def __iter__(self) -> Iterator[str]:
        return iter(self._target.dimensions.required)

    def __len__(self) -> int:
        return len(self._target.dimensions.required)

    def __repr__(self) -> str:
        body = ", ".join(f"{name}: {value!r}" for name, value in self.items())
        return "{" + body + "}"
class _DataCoordinateFullMappingView(Mapping[str, DataIdValue]):
    """A DataCoordinate Mapping view class whose keys are all dimensions."""

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: str) -> DataIdValue:
        return self._target[key]

    def __iter__(self) -> Iterator[str]:
        # Required dimensions first, then implied, matching the target's
        # data-coordinate key order.
        return iter(self._target.dimensions.data_coordinate_keys)

    def __len__(self) -> int:
        return len(self._target.dimensions)

    def __repr__(self) -> str:
        body = ", ".join(f"{name}: {value!r}" for name, value in self.items())
        return "{" + body + "}"
class _RequiredTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that has values for required
    dimensions only, when implied dimensions already exist.

    Note that `_FullTupleDataCoordinate` should be used if there are no
    implied dimensions.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.from_required_values`.
    """

    __slots__ = ()

    @property
    def mapping(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateRequiredMappingView(self)

    @property
    def required_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        # All stored values are required values.
        return self._values

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.universe.conform(dimensions)
        if dimensions == self._dimensions:
            return self
        if self._dimensions.required >= dimensions.names:
            # Every dimension of the subset (required and implied) is among
            # our required values, so we can build a full data ID.
            return DataCoordinate.from_full_values(
                dimensions,
                tuple(self[name] for name in dimensions.data_coordinate_keys),
            )
        return DataCoordinate.from_required_values(
            dimensions, tuple(self[name] for name in dimensions.required)
        )

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.dimensions.union(other.dimensions)
        # See if the other one is already what we want to return.  We don't
        # shortcut-return 'self' because `other` might have full values or
        # even records, and we want to return the more complete data ID.
        if other.dimensions == dimensions:
            return other
        # General case: merge the two mappings and standardize the result.
        merged = {**self.mapping, **other.mapping}
        return DataCoordinate.standardize(merged, dimensions=dimensions)

    # TODO: remove on DM-41326.
    @property
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        # Docstring inherited.
        raise AssertionError("full may only be accessed if hasFull() returns True.")

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        # Build a complete values tuple by extracting implied-dimension
        # primary keys from the given records.  It's possible for these to be
        # inconsistent with self._values (which is a serious problem, of
        # course), but we've documented this as a no-checking API.
        values = self._values + tuple(
            getattr(records[d], cast(Dimension, self.universe[d]).primaryKey.name)
            for d in self._dimensions.implied
        )
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        return _ExpandedTupleDataCoordinate(self._dimensions, values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def __reduce__(self) -> tuple[Any, ...]:
        # Pickle by value, reconstructing through the class itself.
        return (_RequiredTupleDataCoordinate, (self._dimensions, self._values))
class _FullTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that has values for all dimensions.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.from_full_values`.
    """

    __slots__ = ()

    @property
    def mapping(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateFullMappingView(self)

    @property
    def required_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        # Required values always come first in the tuple.
        return self._values[: len(self._dimensions.required)]

    @property
    def full_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        return self._values

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.universe.conform(dimensions)
        if self._dimensions == dimensions:
            return self
        # We have values for all of our dimensions, so any subset can be
        # constructed as another full data ID.
        return DataCoordinate.from_full_values(
            dimensions,
            tuple(self[k] for k in dimensions.data_coordinate_keys),
        )

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.dimensions.union(other.dimensions)
        # See if one or both input data IDs is already what we want to return;
        # if so, return the most complete one we have.
        if other.dimensions == dimensions and other.hasRecords():
            return other
        elif self.dimensions == dimensions and not other.hasRecords():
            return self
        # General case with actual merging of dictionaries.
        values = dict(self.mapping)
        values.update(other.mapping)
        return DataCoordinate.standardize(values, dimensions=dimensions)

    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.full is deprecated in favor of .mapping, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        # Docstring inherited.
        return _DataCoordinateFullView(self)

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        # No consistency checking between records and values here; this is
        # documented as a no-checking API.
        return _ExpandedTupleDataCoordinate(self._dimensions, self._values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def __reduce__(self) -> tuple[Any, ...]:
        # Pickle by value, reconstructing through the class itself.
        return (_FullTupleDataCoordinate, (self._dimensions, self._values))
class _ExpandedTupleDataCoordinate(_FullTupleDataCoordinate):
    """A `DataCoordinate` implementation that directly holds `DimensionRecord`
    objects relevant to it.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.expanded`.

    Parameters
    ----------
    dimensions : `DimensionGroup`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match
        ``dimensions._data_coordinate_indices``.  Just include values for all
        dimensions.
    records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or `None` ]
        A `NamedKeyMapping` with `DimensionElement` keys or a regular
        `~collections.abc.Mapping` with `str` (`DimensionElement` name) keys
        and `DimensionRecord` values.  Keys must cover all elements in
        ``self.dimensions.elements``.  Values may be `None`, but only to
        reflect actual NULL values in the database, not just records that have
        not been fetched.
    """

    def __init__(
        self,
        dimensions: DimensionGroup,
        values: tuple[DataIdValue, ...],
        records: NameLookupMapping[DimensionElement, DimensionRecord | None],
    ):
        super().__init__(dimensions, values)
        assert super().hasFull(), "This implementation requires full dimension records."
        self._records = records

    __slots__ = ("_records",)

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        # Our records cover everything any subset could need, so the result
        # can be re-expanded with the same records mapping.
        return super().subset(dimensions).expanded(self._records)

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        # Already expanded, so the given records are not re-attached; the
        # deprecated mapping type still warns, though.
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        return self

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        result = super().union(other)
        if not result.hasRecords() and other.hasRecords():
            # Try to re-attach records from both inputs; `other`'s records
            # win on overlapping elements (dict merge, right operand wins).
            records = {e: self._record(e) for e in self.dimensions.elements} | {
                e: other._record(e) for e in other.dimensions.elements
            }
            if records.keys() >= result.dimensions.elements:
                return result.expanded(records)
        # NOTE(review): when `other` has no records, the merged result is
        # returned without this data ID's records — presumably intentional,
        # but worth confirming.
        return result

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def _record(self, name: str) -> DimensionRecord | None:
        # Docstring inherited from DataCoordinate.
        return self._records[name]

    def __reduce__(self) -> tuple[Any, ...]:
        # Pickle by value, including the attached records.
        return (_ExpandedTupleDataCoordinate, (self._dimensions, self._values, self._records))

    def __getattr__(self, name: str) -> Any:
        # Expose dimension records as attributes, e.g. ``data_id.exposure``.
        try:
            return self._record(name)
        except KeyError:
            raise AttributeError(name) from None

    def __dir__(self) -> list[str]:
        # Include record attribute names so tab-completion works.
        result = list(super().__dir__())
        result.extend(self.dimensions.elements)
        return result