# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Design notes for this module are in
# doc/lsst.daf.butler/dev/dataCoordinate.py.
#

from __future__ import annotations

__all__ = ("DataCoordinate", "DataId", "DataIdKey", "DataIdValue", "SerializedDataCoordinate")

import numbers
import warnings
from abc import abstractmethod
from collections.abc import Iterable, Iterator, Mapping, Set
from typing import TYPE_CHECKING, Any, ClassVar, Literal, cast, overload

from deprecated.sphinx import deprecated
from lsst.daf.butler._compat import _BaseModelCompat
from lsst.sphgeom import IntersectionRegion, Region
from lsst.utils.introspection import find_outside_stacklevel

from .._named import NamedKeyMapping, NamedValueAbstractSet, NameLookupMapping
from .._timespan import Timespan
from ..json import from_json_pydantic, to_json_pydantic
from ..persistence_context import PersistenceContextVars
from ._elements import Dimension, DimensionElement
from ._graph import DimensionGraph
from ._group import DimensionGroup
from ._records import DimensionRecord, SerializedDimensionRecord

if TYPE_CHECKING:  # Imports needed only for type annotations; may be circular.
    from ..registry import Registry
    from ._universe import DimensionUniverse

DataIdKey = str | Dimension
"""Type annotation alias for the keys that can be used to index a
DataCoordinate.
"""

# Pydantic will cast int to str if str is first in the Union.
DataIdValue = int | str | None
"""Type annotation alias for the values that can be present in a
DataCoordinate or other data ID.
"""


class SerializedDataCoordinate(_BaseModelCompat):
    """Simplified model for serializing a `DataCoordinate`."""

    dataId: dict[str, DataIdValue]
    records: dict[str, SerializedDimensionRecord] | None = None

    @classmethod
    def direct(
        cls, *, dataId: dict[str, DataIdValue], records: dict[str, dict] | None
    ) -> SerializedDataCoordinate:
        """Construct a `SerializedDataCoordinate` directly without validators.

        This differs from the pydantic "construct" method in that the
        arguments are explicitly what the model requires, and it will recurse
        through members, constructing them from their corresponding `direct`
        methods.

        This method should only be called when the inputs are trusted.
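
        Examples
        --------
        A minimal sketch with trusted, already-validated inputs (the
        dimension names here are purely illustrative)::

            serialized = SerializedDataCoordinate.direct(
                dataId={"instrument": "HSC", "detector": 50}, records=None
            )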
        """
        key = (frozenset(dataId.items()), records is not None)
        cache = PersistenceContextVars.serializedDataCoordinateMapping.get()
        if cache is not None and (result := cache.get(key)) is not None:
            return result

        if records is None:
            serialized_records = None
        else:
            serialized_records = {k: SerializedDimensionRecord.direct(**v) for k, v in records.items()}

        node = cls.model_construct(dataId=dataId, records=serialized_records)

        if cache is not None:
            cache[key] = node
        return node


def _intersectRegions(*args: Region) -> Region | None:
    """Return the intersection of several regions.

    For internal use by `ExpandedDataCoordinate` only.

    If no regions are provided, returns `None`.
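
    Examples
    --------
    A sketch, assuming ``a`` and ``b`` are `lsst.sphgeom.Region` instances::

        assert _intersectRegions() is None
        combined = _intersectRegions(a, b)  # a lazy IntersectionRegion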
    """
    if len(args) == 0:
        return None
    else:
        result = args[0]
        for n in range(1, len(args)):
            result = IntersectionRegion(result, args[n])
        return result


class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
    """A validated data ID.

    DataCoordinate guarantees that its key-value pairs identify at least all
    required dimensions in a `DimensionGroup`.

    Notes
    -----
    `DataCoordinate` is an ABC, but it provides `staticmethod` factory
    functions for private concrete implementations that should be sufficient
    for most purposes. `standardize` is the most flexible and safe of these;
    the others (`make_empty`, `from_required_values`, and `from_full_values`)
    are more specialized and perform little or no checking of inputs.
    Lookups for implied dimensions (those in ``self.dimensions.implied``) are
    supported if and only if `hasFull` returns `True`, and the same condition
    governs which keys appear in the `mapping` attribute. As a result,
    `DataCoordinate` equality is not the same as testing for equality on the
    `mapping` attribute (instead, it is the same as testing for equality on
    the `required` attribute).
    See Also
    --------
    :ref:`lsst.daf.butler-dimensions_data_ids`
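
    Examples
    --------
    A minimal sketch, assuming ``universe`` is a `DimensionUniverse` that
    defines an ``instrument`` dimension::

        data_id = DataCoordinate.standardize(
            {"instrument": "HSC"}, universe=universe
        )
        data_id.required["instrument"]  # -> "HSC"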
    """

    __slots__ = ()

    _serializedType = SerializedDataCoordinate

    @staticmethod
    def standardize(
        mapping: NameLookupMapping[Dimension, DataIdValue] | None = None,
        *,
        dimensions: Iterable[str] | DimensionGroup | DimensionGraph | None = None,
        graph: DimensionGraph | None = None,
        universe: DimensionUniverse | None = None,
        defaults: DataCoordinate | None = None,
        **kwargs: Any,
    ) -> DataCoordinate:
        """Standardize the supplied dataId.

        Adapts an arbitrary mapping and/or additional arguments into a true
        `DataCoordinate`, or augments an existing one.

        Parameters
        ----------
        mapping : `~collections.abc.Mapping`, optional
            An informal data ID that maps dimensions or dimension names to
            their primary key values (may also be a true `DataCoordinate`).
        dimensions : `~collections.abc.Iterable` [ `str` ], `DimensionGroup` \
                or `DimensionGraph`, optional
            The dimensions to be identified by the new `DataCoordinate`. If
            not provided, will be inferred from the keys of ``mapping`` and
            ``**kwargs``, and ``universe`` must be provided unless ``mapping``
            is already a `DataCoordinate`.
        graph : `DimensionGraph`, optional
            Like ``dimensions``, but requires a `DimensionGraph` instance.
            Ignored if ``dimensions`` is provided. Deprecated and will be
            removed after v27.
        universe : `DimensionUniverse`, optional
            All known dimensions and their relationships; used to expand and
            validate dependencies when ``graph`` is not provided.
        defaults : `DataCoordinate`, optional
            Default dimension key-value pairs to use when needed. These are
            never used to infer ``graph``, and are ignored if a different
            value is provided for the same key in ``mapping`` or ``**kwargs``.
        **kwargs
            Additional keyword arguments are treated like additional
            key-value pairs in ``mapping``.

        Returns
        -------
        coordinate : `DataCoordinate`
            A validated `DataCoordinate` instance.

        Raises
        ------
        TypeError
            Raised if the set of optional arguments provided is not supported.
        KeyError
            Raised if a key-value pair for a required dimension is missing.
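
        Examples
        --------
        A sketch of common call patterns; ``universe`` is assumed to define
        ``instrument`` and ``detector`` dimensions::

            # Build from a mapping plus keyword overrides.
            data_id = DataCoordinate.standardize(
                {"instrument": "HSC"}, detector=50, universe=universe
            )
            # Narrow an existing data ID to a subset of its dimensions.
            narrowed = DataCoordinate.standardize(
                data_id, dimensions=["instrument"]
            )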
        """
        universe = (
            universe
            or getattr(dimensions, "universe", None)
            or getattr(graph, "universe", None)
            or getattr(mapping, "universe", None)
        )
        if universe is None:
            raise TypeError(
                "universe must be provided, either directly or via dimensions, mapping, or graph."
            )
        if graph is not None:
            # TODO: remove argument on DM-41326.
            warnings.warn(
                "The 'graph' argument to DataCoordinate.standardize is deprecated in favor of the "
                "'dimensions' argument, and will be removed after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            dimensions = graph.names
        if dimensions is not None:
            dimensions = universe.conform(dimensions)
        del graph  # make sure we don't actually use this below
        new_mapping: dict[str, DataIdValue] = {}
        if isinstance(mapping, DataCoordinate):
            if dimensions is None:
                if not kwargs:
                    # Already standardized to exactly what we want.
                    return mapping
            elif kwargs.keys().isdisjoint(dimensions.names):
                # User provided kwargs, but told us not to use them by
                # passing in dimensions that are disjoint from those kwargs.
                # This is not necessarily user error - it's a useful pattern
                # to pass in all of the key-value pairs you have and let the
                # code here pull out only what it needs.
                return mapping.subset(dimensions.names)
            new_mapping.update((name, mapping[name]) for name in mapping.dimensions.required)
            if mapping.hasFull():
                new_mapping.update((name, mapping[name]) for name in mapping.dimensions.implied)
        elif isinstance(mapping, NamedKeyMapping):
            warnings.warn(
                "Passing a NamedKeyMapping to DataCoordinate.standardize is deprecated, and will be "
                "removed after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            new_mapping.update(mapping.byName())
        elif mapping is not None:
            new_mapping.update(mapping)
        new_mapping.update(kwargs)
        if dimensions is None:
            if defaults is not None:
                universe = defaults.universe
            elif universe is None:
                raise TypeError("universe must be provided if graph is not.")
            dimensions = DimensionGroup(universe, new_mapping.keys())
        if not dimensions:
            return DataCoordinate.make_empty(universe)
        # Some backends cannot handle numpy.int64 type which is a subclass of
        # numbers.Integral; convert that to int.
        for k, v in new_mapping.items():
            if isinstance(v, numbers.Integral):
                new_mapping[k] = int(v)  # type: ignore
        if defaults is not None:
            for k, v in defaults.mapping.items():
                new_mapping.setdefault(k, v)
        if new_mapping.keys() >= dimensions.names:
            return DataCoordinate.from_full_values(
                dimensions, tuple(new_mapping[name] for name in dimensions.data_coordinate_keys)
            )
        else:
            try:
                values = tuple(new_mapping[name] for name in dimensions.required)
            except KeyError as err:
                raise KeyError(f"No value in data ID ({mapping}) for required dimension {err}.") from err
        return DataCoordinate.from_required_values(dimensions, values)

    @property
    @abstractmethod
    def mapping(self) -> Mapping[str, DataIdValue]:
        """A mapping view of the data ID with keys for all dimensions it has
        values for.
        """
        raise NotImplementedError()

    @property
    @abstractmethod
    def required(self) -> Mapping[str, DataIdValue]:
        """A mapping view of the data ID with keys for just its required
        dimensions.
        """
        raise NotImplementedError()

    @property
    @abstractmethod
    def required_values(self) -> tuple[DataIdValue, ...]:
        """The required values (only) of this data ID as a tuple.

        Element order is consistent with `required`.

        In contexts where all data IDs have the same dimensions, comparing and
        hashing these tuples can be much faster than comparing the original
        `DataCoordinate` instances.
        """
        raise NotImplementedError()

    @property
    def full_values(self) -> tuple[DataIdValue, ...]:
        """The full values (only) of this data ID as a tuple.

        Element order is consistent with
        `DimensionGroup.data_coordinate_keys`, i.e. all required dimensions
        followed by all implied dimensions.
        """
        raise ValueError(f"DataCoordinate {self} has only required values.")

    @staticmethod
    def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate`.

        It identifies the null set of dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies no dimensions. `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        return DataCoordinate.make_empty(universe)

    @staticmethod
    def make_empty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate`.

        It identifies the null set of dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies no dimensions. `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        return _ExpandedTupleDataCoordinate(universe.empty.as_group(), (), {})

    # TODO: remove on DM-41326.
    @staticmethod
    @deprecated(
        "fromRequiredValues is deprecated in favor of from_required_values, "
        "which takes a DimensionGroup instead of a DimensionGraph. It will be "
        "removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def fromRequiredValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from required dimension values.

        This method is deprecated in favor of `from_required_values`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to ``graph.required``,
            in that order.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` only if ``graph.implied``
            is empty. ``dataId.hasRecords()`` will return `True`
            if and only if ``graph`` is empty.
        """
        return DataCoordinate.from_required_values(graph._group, values)

    @staticmethod
    def from_required_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from required dimension values.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        dimensions : `DimensionGroup`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``dimensions.required``, in that order.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``data_id.hasFull()`` will return `True` only if
            ``dimensions.implied`` is empty. ``data_id.hasRecords()`` will
            return `True` if and only if ``dimensions`` is empty.
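
        Examples
        --------
        A sketch, assuming ``group`` is a `DimensionGroup` whose required
        dimensions are exactly ``("instrument", "detector")``::

            data_id = DataCoordinate.from_required_values(group, ("HSC", 50))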
        """
        assert len(dimensions.required) == len(
            values
        ), f"Inconsistency between dimensions {dimensions.required} and required values {values}."
        if not dimensions:
            return DataCoordinate.make_empty(dimensions.universe)
        if not dimensions.implied:
            return _FullTupleDataCoordinate(dimensions, values)
        return _RequiredTupleDataCoordinate(dimensions, values)

    # TODO: remove on DM-41326.
    @staticmethod
    @deprecated(
        "fromFullValues is deprecated in favor of from_full_values, "
        "which takes a DimensionGroup instead of a DimensionGraph. It will be "
        "removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def fromFullValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from all dimension values.

        This method is deprecated in favor of `from_full_values`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(graph.required, graph.implied)``, in that order.
            Note that this is _not_ the same order as ``graph.dimensions``,
            though these contain the same elements.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`.
            ``dataId.hasRecords()`` will only return `True` if ``graph`` is
            empty.
        """
        return DataCoordinate.from_full_values(graph._group, values)

    @staticmethod
    def from_full_values(dimensions: DimensionGroup, values: tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from all dimension values.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        dimensions : `DimensionGroup`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(dimensions.required, dimensions.implied)``, in
            that order. Note that this is _not_ the same order as
            ``dimensions.names``, though these contain the same elements.

        Returns
        -------
        data_id : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``data_id.hasFull()`` will always return `True`.
            ``data_id.hasRecords()`` will only return `True` if ``dimensions``
            is empty.
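
        Examples
        --------
        A sketch, assuming ``group`` has required dimensions
        ``("instrument", "physical_filter")`` and implied dimension
        ``band``::

            data_id = DataCoordinate.from_full_values(
                group, ("HSC", "HSC-G", "g")
            )
            assert data_id.hasFull()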
        """
        assert len(dimensions) == len(
            values
        ), f"Inconsistency between dimensions {dimensions.data_coordinate_keys} and full values {values}."
        if not dimensions:
            return DataCoordinate.make_empty(dimensions.universe)
        return _FullTupleDataCoordinate(dimensions, values)

    def __bool__(self) -> bool:
        return bool(self.dimensions)

    def __hash__(self) -> int:
        return hash((self.dimensions,) + self.required_values)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, DataCoordinate):
            other = DataCoordinate.standardize(other, universe=self.universe)
        return self.dimensions == other.dimensions and self.required_values == other.required_values

    def __repr__(self) -> str:
        # We can't make repr yield something that could be exec'd here without
        # printing out the whole DimensionUniverse.
        return str(self.mapping)

    def __lt__(self, other: Any) -> bool:
        if not isinstance(other, DataCoordinate):
            return NotImplemented
        # Unlike repr() we only use required keys here to ensure that __eq__
        # can not be true simultaneously with __lt__ being true.
        return self.required_values < other.required_values

    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def __iter__(self) -> Iterator[Dimension]:
        return iter(self.keys())

    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def __len__(self) -> int:
        return len(self.keys())

    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a Mapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def keys(self) -> NamedValueAbstractSet[Dimension]:  # type: ignore
        return self.graph.required

    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.names is deprecated in favor of the .dimensions "
        "attribute, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def names(self) -> Set[str]:
        """Names of the required dimensions identified by this data ID.

        They are returned in the same order as `keys`
        (`collections.abc.Set` [ `str` ]).
        """
        return self.keys().names

    @abstractmethod
    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        """Return a `DataCoordinate` whose graph is a subset of ``self.graph``.

        Parameters
        ----------
        dimensions : `DimensionGraph`, `DimensionGroup`, or \
                `~collections.abc.Iterable` [ `str` ]
            The dimensions identified by the returned `DataCoordinate`.
            Passing a `DimensionGraph` is deprecated and support will be
            dropped after v27.

        Returns
        -------
        coordinate : `DataCoordinate`
            A `DataCoordinate` instance that identifies only the given
            dimensions. May be ``self`` if ``graph == self.graph``.

        Raises
        ------
        KeyError
            Raised if the primary key value for one or more required
            dimensions is unknown. This may happen even if the required
            dimensions of the new graph are a subset of the dimensions
            actually identified by this data ID. As an example, consider
            trying to go from a data ID with dimensions {instrument,
            physical_filter, band} to just {instrument, band}; band is
            implied by physical_filter and hence would have no value in the
            original data ID if ``self.hasFull()`` is `False`.

        Notes
        -----
        If `hasFull` and `hasRecords` return `True` on ``self``, they will
        return `True` (respectively) on the returned `DataCoordinate` as well.
        The converse does not hold.
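
        Examples
        --------
        A sketch, assuming ``data_id`` identifies ``{instrument,
        physical_filter, band}`` and has full values::

            narrower = data_id.subset(["instrument", "band"])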
        """
        # TODO: update docs r.e. deprecation on DM-41326.
        raise NotImplementedError()

    @abstractmethod
    def union(self, other: DataCoordinate) -> DataCoordinate:
        """Combine two data IDs.

        Yields a new one that identifies all dimensions that either of them
        identify.

        Parameters
        ----------
        other : `DataCoordinate`
            Data ID to combine with ``self``.

        Returns
        -------
        unioned : `DataCoordinate`
            A `DataCoordinate` instance that satisfies
            ``unioned.dimensions == self.dimensions.union(other.dimensions)``.
            Will preserve ``hasFull`` and ``hasRecords`` whenever possible.

        Notes
        -----
        No checking for consistency is performed on values for keys that
        ``self`` and ``other`` have in common, and which value is included in
        the returned data ID is not specified.
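
        Examples
        --------
        A sketch, assuming ``visit_id`` and ``tract_id`` are `DataCoordinate`
        instances with different dimensions::

            combined = visit_id.union(tract_id)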
        """
        raise NotImplementedError()

    @abstractmethod
    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        """Return a `DataCoordinate` that holds the given records.

        Guarantees that `hasRecords` returns `True`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `Registry.expandDataId` instead.

        Parameters
        ----------
        records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or \
                `None` ]
            A `NamedKeyMapping` with `DimensionElement` keys or a regular
            `~collections.abc.Mapping` with `str` (`DimensionElement` name)
            keys and `DimensionRecord` values. Keys must cover all elements in
            ``self.graph.elements``. Values may be `None`, but only to reflect
            actual NULL values in the database, not just records that have not
            been fetched. Passing a `NamedKeyMapping` is deprecated and will
            not be supported after v27.
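
        Examples
        --------
        A sketch, assuming ``records`` maps every element name in
        ``self.dimensions.elements`` to a `DimensionRecord` (or `None`)::

            expanded = data_id.expanded(records)
            assert expanded.hasRecords()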
        """
        # TODO: update docs r.e. deprecation on DM-41326.
        raise NotImplementedError()

    @property
    def universe(self) -> DimensionUniverse:
        """Universe that defines all known compatible dimensions.

        The universe will be compatible with this coordinate
        (`DimensionUniverse`).
        """
        return self.dimensions.universe

    @property
    @abstractmethod
    def dimensions(self) -> DimensionGroup:
        """Dimensions identified by this data ID (`DimensionGroup`).

        Note that values are only required to be present for dimensions in
        ``self.dimensions.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        raise NotImplementedError()

    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.graph is deprecated in favor of .dimensions, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def graph(self) -> DimensionGraph:
        """Dimensions identified by this data ID (`DimensionGraph`).

        Note that values are only required to be present for dimensions in
        ``self.graph.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        return self.dimensions._as_graph()

    @abstractmethod
    def hasFull(self) -> bool:
        """Whether this data ID contains implied and required values.

        Returns
        -------
        state : `bool`
            If `True`, `__getitem__`, `get`, and `__contains__` (but not
            `keys`!) will act as though the mapping includes key-value pairs
            for implied dimensions, and the `full` property may be used. If
            `False`, these operations only include key-value pairs for
            required dimensions, and accessing `full` is an error. Always
            `True` if there are no implied dimensions.
        """
        raise NotImplementedError()

    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.full is deprecated in favor of .mapping, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    @abstractmethod
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        """Return mapping for all dimensions in ``self.dimensions``.

        The mapping includes key-value pairs for all dimensions in
        ``self.dimensions``, including implied.

        Accessing this attribute if `hasFull` returns `False` is a logic error
        that may raise an exception of unspecified type either immediately or
        when implied keys are accessed via the returned mapping, depending on
        the implementation and whether assertions are enabled.
        """
        raise NotImplementedError()

    # TODO: remove on DM-41326.
    @deprecated(
        "DataCoordinate.values_tuple() is deprecated in favor of .required_values, and will be dropped "
        "after v27.",
        version="v27",
        category=FutureWarning,
    )
    def values_tuple(self) -> tuple[DataIdValue, ...]:
        """Return the required values (only) of this data ID as a tuple.

        In contexts where all data IDs have the same dimensions, comparing and
        hashing these tuples can be *much* faster than comparing the original
        `DataCoordinate` instances.
        """
        return self.required_values

    @abstractmethod
    def hasRecords(self) -> bool:
        """Whether this data ID contains records.

        These are the records for all of the dimension elements it identifies.

        Returns
        -------
        state : `bool`
            If `True`, the following attributes may be accessed:

            - `records`
            - `region`
            - `timespan`
            - `pack`

            If `False`, accessing any of these is considered a logic error.
        """
        raise NotImplementedError()

    @property
    def records(self) -> NamedKeyMapping[DimensionElement, DimensionRecord | None]:
        """A mapping that contains `DimensionRecord` objects for all elements
        identified by this data ID.

        This mapping will become a regular `~collections.abc.Mapping` with
        `str` keys after v27.

        Notes
        -----
        The values of this mapping may be `None` if and only if there is no
        record for that element with these dimensions in the database (which
        means some foreign key field must have a NULL value).

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may raise an exception of unspecified type either
        immediately or when the returned mapping is used, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "records may only be accessed if hasRecords() returns True."
        return _DataCoordinateRecordsView(self)

    @abstractmethod
    def _record(self, name: str) -> DimensionRecord | None:
        """Protected implementation hook that backs the ``records`` attribute.

        Parameters
        ----------
        name : `str`
            The name of a `DimensionElement`, guaranteed to be in
            ``self.dimensions.elements``.

        Returns
        -------
        record : `DimensionRecord` or `None`
            The dimension record for the given element identified by this
            data ID, or `None` if there is no such record.
        """
        raise NotImplementedError()

    @property
    def region(self) -> Region | None:
        """Spatial region associated with this data ID
        (`lsst.sphgeom.Region` or `None`).

        This is `None` if and only if ``self.dimensions.spatial`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "region may only be accessed if hasRecords() returns True."
        regions = []
        for family in self.dimensions.spatial:
            element = family.choose(self.dimensions.elements, self.universe)
            record = self._record(element.name)
            if record is None or record.region is None:
                return None
            else:
                regions.append(record.region)
        return _intersectRegions(*regions)

    @property
    def timespan(self) -> Timespan | None:
        """Temporal interval associated with this data ID
        (`Timespan` or `None`).

        This is `None` if and only if ``self.dimensions.temporal`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "timespan may only be accessed if hasRecords() returns True."
        timespans = []
        for family in self.dimensions.temporal:
            element = family.choose(self.dimensions.elements, self.universe)
            record = self._record(element.name)
            # DimensionRecord subclasses for temporal elements always have
            # .timespan, but they're dynamic so this can't be type-checked.
            if record is None or record.timespan is None:
                return None
            else:
                timespans.append(record.timespan)
        if not timespans:
            return None
        elif len(timespans) == 1:
            return timespans[0]
        else:
            return Timespan.intersection(*timespans)

    @overload
    def pack(self, name: str, *, returnMaxBits: Literal[True]) -> tuple[int, int]:
        ...

    @overload
    def pack(self, name: str, *, returnMaxBits: Literal[False]) -> int:
        ...

    # TODO: Remove this method and its overloads above on DM-38687.
    @deprecated(
        "Deprecated in favor of configurable dimension packers. Will be removed after v26.",
        version="v26",
        category=FutureWarning,
    )
    def pack(self, name: str, *, returnMaxBits: bool = False) -> tuple[int, int] | int:
        """Pack this data ID into an integer.

        Parameters
        ----------
        name : `str`
            Name of the `DimensionPacker` algorithm (as defined in the
            dimension configuration).
        returnMaxBits : `bool`, optional
            If `True` (`False` is default), return the maximum number of
            nonzero bits in the returned integer across all data IDs.

        Returns
        -------
        packed : `int`
            Integer ID. This ID is unique only across data IDs that have
            the same values for the packer's "fixed" dimensions.
        maxBits : `int`, optional
            Maximum number of nonzero bits in ``packed``. Not returned unless
            ``returnMaxBits`` is `True`.

        Notes
        -----
        Calling this method if `hasRecords` returns `False` is a logic error
        that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "pack() may only be called if hasRecords() returns True."
        return self.universe.makePacker(name, self).pack(self, returnMaxBits=returnMaxBits)

    def to_simple(self, minimal: bool = False) -> SerializedDataCoordinate:
        """Convert this class to a simple python type.

        This is suitable for serialization.

        Parameters
        ----------
        minimal : `bool`, optional
            Use minimal serialization. If set the records will not be
            attached.

        Returns
        -------
        simple : `SerializedDataCoordinate`
            The object converted to simple form.
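
        Examples
        --------
        A sketch of a serialization round trip; ``universe`` is assumed to be
        available on the deserializing side::

            simple = data_id.to_simple()
            restored = DataCoordinate.from_simple(simple, universe=universe)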
        """
        # Convert to a dict form
        records: dict[str, SerializedDimensionRecord] | None
        if not minimal and self.hasRecords():
            records = {
                k: v.to_simple() for k in self.dimensions.elements if (v := self.records[k]) is not None
            }
        else:
            records = None

        return SerializedDataCoordinate(dataId=dict(self.mapping), records=records)

    @classmethod
    def from_simple(
        cls,
        simple: SerializedDataCoordinate,
        universe: DimensionUniverse | None = None,
        registry: Registry | None = None,
    ) -> DataCoordinate:
        """Construct a new object from the simplified form.

        The data is assumed to be of the form returned from the `to_simple`
        method.

        Parameters
        ----------
        simple : `SerializedDataCoordinate`
            The model returned by `to_simple()`.
        universe : `DimensionUniverse`, optional
            Object that manages all known dimensions.
        registry : `lsst.daf.butler.Registry`, optional
            Registry from which a universe can be extracted. Can be `None`
            if universe is provided explicitly.

        Returns
        -------
        dataId : `DataCoordinate`
            Newly-constructed object.
        """
        key = (frozenset(simple.dataId.items()), simple.records is not None)
        cache = PersistenceContextVars.dataCoordinates.get()
        if cache is not None and (result := cache.get(key)) is not None:
            return result
        if universe is None and registry is None:
            raise ValueError("One of universe or registry is required to convert a dict to a DataCoordinate")
        if universe is None and registry is not None:
            universe = registry.dimensions
        if universe is None:
            # this is for mypy
            raise ValueError("Unable to determine a usable universe")

        dataId = cls.standardize(simple.dataId, universe=universe)
        if simple.records:
            dataId = dataId.expanded(
                {k: DimensionRecord.from_simple(v, universe=universe) for k, v in simple.records.items()}
            )
        if cache is not None:
            cache[key] = dataId
        return dataId

    to_json = to_json_pydantic
    from_json: ClassVar = classmethod(from_json_pydantic)


DataId = DataCoordinate | Mapping[str, Any]
"""A type-annotation alias for signatures that accept both informal data ID
dictionaries and validated `DataCoordinate` instances.
"""


# Deprecated by having its only public access (DataCoordinate.full) deprecated.
# TODO: remove on DM-41326.
class _DataCoordinateFullView(NamedKeyMapping[Dimension, DataIdValue]):
    """View class for `DataCoordinate.full`.

    Provides the default implementation for
    `DataCoordinate.full`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    def __init__(self, target: _BasicTupleDataCoordinate):
        self._target = target

    __slots__ = ("_target",)

    def __repr__(self) -> str:
        return repr(self._target)

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        return self._target[key]

    def __iter__(self) -> Iterator[Dimension]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueAbstractSet[Dimension]:  # type: ignore
        return self._target.graph.dimensions

    @property
    def names(self) -> Set[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names


# TODO: Make a Mapping[str, DimensionRecord | None] on DM-41326.
class _DataCoordinateRecordsView(NamedKeyMapping[DimensionElement, DimensionRecord | None]):
    """View class for `DataCoordinate.records`.

    Provides the default implementation for
    `DataCoordinate.records`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    def __init__(self, target: DataCoordinate):
        self._target = target

    __slots__ = ("_target",)

    def __repr__(self) -> str:
        terms = [f"{d}: {self[d]!r}" for d in self._target.graph.elements.names]
        return "{{{}}}".format(", ".join(terms))

    def __str__(self) -> str:
        return "\n".join(str(v) for v in self.values())

    def __getitem__(self, key: DimensionElement | str) -> DimensionRecord | None:
        if isinstance(key, DimensionElement):
            warnings.warn(
                "Using Dimension keys in DataCoordinate is deprecated and will not be supported after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            key = key.name
        return self._target._record(key)

    # TODO: fix on DM-41326.
    @deprecated(
        "Iteration over DataCoordinate.records is deprecated as the key type will change to 'str' after "
        "v27. Use DataCoordinate.dimensions.elements to get the names of all dimension elements instead.",
        version="v27",
        category=FutureWarning,
    )
    def __iter__(self) -> Iterator[DimensionElement]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    # TODO: remove on DM-41326.
    # Deprecation warning will come from using .graph.
    def keys(self) -> NamedValueAbstractSet[DimensionElement]:  # type: ignore
        return self._target.graph.elements

    @property
    @deprecated(
        "DataCoordinate.records.names is deprecated in favor of DataCoordinate.dimensions.elements and "
        "will be removed after v27.",
        version="v27",
        category=FutureWarning,
    )
    def names(self) -> Set[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names


class _BasicTupleDataCoordinate(DataCoordinate):
    """Intermediate base class for the standard implementation of
    `DataCoordinate`.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via the static
    methods there.

    Parameters
    ----------
    dimensions : `DimensionGroup`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match
        ``dimensions.data_coordinate_keys``. May include values for just
        required dimensions (which always come first) or all dimensions
        (concrete subclass implementations will care which).
    """

    def __init__(self, dimensions: DimensionGroup, values: tuple[DataIdValue, ...]):
        self._dimensions = dimensions
        self._values = values

    __slots__ = ("_dimensions", "_values")

    @property
    def dimensions(self) -> DimensionGroup:
        # Docstring inherited from DataCoordinate.
        return self._dimensions

    @property
    def required(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateRequiredMappingView(self)

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Docstring inherited from DataCoordinate.
        # TODO: remove on DM-41326.
        if isinstance(key, Dimension):
            warnings.warn(
                "Using Dimension keys in DataCoordinate is deprecated and will not be supported after v27.",
                category=FutureWarning,
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
            )
            key = key.name
        index = self._dimensions._data_coordinate_indices[key]
        try:
            return self._values[index]
        except IndexError:
            # Caller asked for an implied dimension, but this object only has
            # values for the required ones.
            raise KeyError(key) from None

    # TODO: remove on DM-41326.
    @deprecated(
        "Using DataCoordinate as a NamedKeyMapping is deprecated in favor of the "
        ".mapping and .required attributes, and will be dropped after v27. "
        "Use `dict(data_id.required)` as an exact replacement for `data_id.byName()`.",
        version="v27",
        category=FutureWarning,
    )
    def byName(self) -> dict[str, DataIdValue]:
        # Docstring inheritance.
        # Reimplementation is for optimization; `required_values` is much
        # faster to iterate over than values() because it doesn't go through
        # `__getitem__`.
        return dict(zip(self.names, self.required_values, strict=True))

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def _record(self, name: str) -> DimensionRecord | None:
        # Docstring inherited from DataCoordinate.
        raise AssertionError()

    def __getattr__(self, name: str) -> Any:
        if name in self.dimensions.elements:
            raise AttributeError(
                f"Dimension record attribute {name!r} is only available on expanded DataCoordinates."
            )
        raise AttributeError(name)


class _DataCoordinateRequiredMappingView(Mapping[str, DataIdValue]):
    """A DataCoordinate Mapping view class whose keys are just the required
    dimensions.
    """

    def __init__(self, target: DataCoordinate):
        self._target = target

    __slots__ = ("_target",)

    def __getitem__(self, key: str) -> DataIdValue:
        if key not in self._target.dimensions.required:
            raise KeyError(key)
        return self._target[key]

    def __len__(self) -> int:
        return len(self._target.dimensions.required)

    def __iter__(self) -> Iterator[str]:
        return iter(self._target.dimensions.required)

    def __repr__(self) -> str:
        return f"{{{', '.join(f'{k}: {v!r}' for k, v in self.items())}}}"


class _DataCoordinateFullMappingView(Mapping[str, DataIdValue]):
    """A DataCoordinate Mapping view class whose keys are all dimensions."""

    def __init__(self, target: DataCoordinate):
        self._target = target

    __slots__ = ("_target",)

    def __getitem__(self, key: str) -> DataIdValue:
        return self._target[key]

    def __len__(self) -> int:
        return len(self._target.dimensions)

    def __iter__(self) -> Iterator[str]:
        return iter(self._target.dimensions.data_coordinate_keys)

    def __repr__(self) -> str:
        return f"{{{', '.join(f'{k}: {v!r}' for k, v in self.items())}}}"


class _RequiredTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that has values for required
    dimensions only, for dimension groups that have implied dimensions.

    Note that `_FullTupleDataCoordinate` should be used if there are no
    implied dimensions.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.from_required_values`.
    """

    __slots__ = ()

    @property
    def mapping(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateRequiredMappingView(self)

    @property
    def required_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        return self._values

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.universe.conform(dimensions)
        if self._dimensions == dimensions:
            return self
        elif self._dimensions.required >= dimensions.names:
            return DataCoordinate.from_full_values(
                dimensions,
                tuple(self[k] for k in dimensions.data_coordinate_keys),
            )
        else:
            return DataCoordinate.from_required_values(
                dimensions, tuple(self[k] for k in dimensions.required)
            )

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.dimensions.union(other.dimensions)
        # See if the other one is already what we want to return. We don't
        # shortcut-return 'self' because `other` might have full values or
        # even records, and we want to return the more complete data ID.
        if other.dimensions == dimensions:
            return other
        # General case with actual merging of dictionaries.
        values = dict(self.mapping)
        values.update(other.mapping)
        return DataCoordinate.standardize(values, dimensions=dimensions)

    # TODO: remove on DM-41326.
    @property
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        # Docstring inherited.
        raise AssertionError("full may only be accessed if hasFull() returns True.")

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        # Extract a complete values tuple from the attributes of the given
        # records. It's possible for these to be inconsistent with
        # self._values (which is a serious problem, of course), but we've
        # documented this as a no-checking API.
        values = self._values + tuple(
            getattr(records[d], cast(Dimension, self.universe[d]).primaryKey.name)
            for d in self._dimensions.implied
        )
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        return _ExpandedTupleDataCoordinate(self._dimensions, values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def __reduce__(self) -> tuple[Any, ...]:
        return (_RequiredTupleDataCoordinate, (self._dimensions, self._values))


class _FullTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that has values for all dimensions.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.from_full_values`.
    """

    __slots__ = ()

    @property
    def mapping(self) -> Mapping[str, DataIdValue]:
        # Docstring inherited from DataCoordinate.
        return _DataCoordinateFullMappingView(self)

    @property
    def required_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        return self._values[: len(self._dimensions.required)]

    @property
    def full_values(self) -> tuple[DataIdValue, ...]:
        # Docstring inherited from DataCoordinate.
        return self._values

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.universe.conform(dimensions)
        if self._dimensions == dimensions:
            return self
        return DataCoordinate.from_full_values(
            dimensions,
            tuple(self[k] for k in dimensions.data_coordinate_keys),
        )

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        dimensions = self.dimensions.union(other.dimensions)
        # See if one or both input data IDs is already what we want to return;
        # if so, return the most complete one we have.
        if other.dimensions == dimensions and other.hasRecords():
            return other
        elif self.dimensions == dimensions and not other.hasRecords():
            return self
        # General case with actual merging of dictionaries.
        values = dict(self.mapping)
        values.update(other.mapping)
        return DataCoordinate.standardize(values, dimensions=dimensions)

    # TODO: remove on DM-41326.
    @property
    @deprecated(
        "DataCoordinate.full is deprecated in favor of .mapping, and will be dropped after v27.",
        version="v27",
        category=FutureWarning,
    )
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        # Docstring inherited.
        return _DataCoordinateFullView(self)

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        return _ExpandedTupleDataCoordinate(self._dimensions, self._values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def __reduce__(self) -> tuple[Any, ...]:
        return (_FullTupleDataCoordinate, (self._dimensions, self._values))


class _ExpandedTupleDataCoordinate(_FullTupleDataCoordinate):
    """A `DataCoordinate` implementation that directly holds `DimensionRecord`
    objects relevant to it.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.expanded`.

    Parameters
    ----------
    dimensions : `DimensionGroup`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match
        ``dimensions._data_coordinate_indices``. Must include values for all
        dimensions.
    records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or `None` ]
        A `NamedKeyMapping` with `DimensionElement` keys or a regular
        `~collections.abc.Mapping` with `str` (`DimensionElement` name) keys
        and `DimensionRecord` values. Keys must cover all elements in
        ``self.dimensions.elements``. Values may be `None`, but only to
        reflect actual NULL values in the database, not just records that have
        not been fetched.
    """

    def __init__(
        self,
        dimensions: DimensionGroup,
        values: tuple[DataIdValue, ...],
        records: NameLookupMapping[DimensionElement, DimensionRecord | None],
    ):
        super().__init__(dimensions, values)
        assert super().hasFull(), "This implementation requires full dimension records."
        self._records = records

    __slots__ = ("_records",)

    def subset(self, dimensions: DimensionGraph | DimensionGroup | Iterable[str]) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        return super().subset(dimensions).expanded(self._records)

    def expanded(
        self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
    ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if isinstance(records, NamedKeyMapping):
            warnings.warn(
                "NamedKeyMappings will not be accepted after v27; pass a Mapping with str keys instead.",
                stacklevel=find_outside_stacklevel("lsst.daf.butler"),
                category=FutureWarning,
            )
        return self

    def union(self, other: DataCoordinate) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        result = super().union(other)
        if not result.hasRecords() and other.hasRecords():
            records = {e: self._record(e) for e in self.dimensions.elements} | {
                e: other._record(e) for e in other.dimensions.elements
            }
            if records.keys() >= result.dimensions.elements:
                return result.expanded(records)
        return result

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def _record(self, name: str) -> DimensionRecord | None:
        # Docstring inherited from DataCoordinate.
        return self._records[name]

    def __reduce__(self) -> tuple[Any, ...]:
        return (_ExpandedTupleDataCoordinate, (self._dimensions, self._values, self._records))

    def __getattr__(self, name: str) -> Any:
        try:
            return self._record(name)
        except KeyError:
            raise AttributeError(name) from None

    def __dir__(self) -> list[str]:
        result = list(super().__dir__())
        result.extend(self.dimensions.elements)
        return result