Coverage for python/lsst/daf/butler/dimensions/_coordinate.py: 35%
364 statements
coverage.py v7.3.2, created at 2023-10-27 09:44 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28#
29# Design notes for this module are in
30# doc/lsst.daf.butler/dev/dataCoordinate.py.
31#
33from __future__ import annotations
35__all__ = ("DataCoordinate", "DataId", "DataIdKey", "DataIdValue", "SerializedDataCoordinate")
37import numbers
38from abc import abstractmethod
39from collections.abc import Iterator, Mapping, Set
40from typing import TYPE_CHECKING, Any, ClassVar, Literal, overload
42from deprecated.sphinx import deprecated
43from lsst.daf.butler._compat import _BaseModelCompat
44from lsst.sphgeom import IntersectionRegion, Region
46from .._named import NamedKeyDict, NamedKeyMapping, NamedValueAbstractSet, NameLookupMapping
47from .._timespan import Timespan
48from ..json import from_json_pydantic, to_json_pydantic
49from ..persistence_context import PersistenceContextVars
50from ._elements import Dimension, DimensionElement
51from ._graph import DimensionGraph
52from ._records import DimensionRecord, SerializedDimensionRecord
54if TYPE_CHECKING: # Imports needed only for type annotations; may be circular.
55 from ..registry import Registry
56 from ._universe import DimensionUniverse
58DataIdKey = str | Dimension
59"""Type annotation alias for the keys that can be used to index a
60DataCoordinate.
61"""
63# Pydantic will cast int to str if str is first in the Union.
64DataIdValue = int | str | None
65"""Type annotation alias for the values that can be present in a
66DataCoordinate or other data ID.
67"""
70class SerializedDataCoordinate(_BaseModelCompat):
71 """Simplified model for serializing a `DataCoordinate`."""
73 dataId: dict[str, DataIdValue]
74 records: dict[str, SerializedDimensionRecord] | None = None
76 @classmethod
77 def direct(
78 cls, *, dataId: dict[str, DataIdValue], records: dict[str, dict] | None
79 ) -> SerializedDataCoordinate:
80 """Construct a `SerializedDataCoordinate` directly without validators.
82 This differs from the pydantic "construct" method in that the arguments
83 are explicitly what the model requires, and it will recurse through
84 members, constructing them from their corresponding `direct` methods.
86 This method should only be called when the inputs are trusted.
87 """
88 key = (frozenset(dataId.items()), records is not None)
89 cache = PersistenceContextVars.serializedDataCoordinateMapping.get()
90 if cache is not None and (result := cache.get(key)) is not None:
91 return result
93 if records is None:
94 serialized_records = None
95 else:
96 serialized_records = {k: SerializedDimensionRecord.direct(**v) for k, v in records.items()}
98 node = cls.model_construct(dataId=dataId, records=serialized_records)
100 if cache is not None:
101 cache[key] = node
102 return node
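# A hedged usage sketch; the dimension names and values are illustrative
# assumptions, and `direct` must only be fed trusted, pre-validated input:
#
#     >>> serialized = SerializedDataCoordinate.direct(
#     ...     dataId={"instrument": "HSC", "detector": 42}, records=None
#     ... )
#     >>> serialized.records is None
#     True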
105def _intersectRegions(*args: Region) -> Region | None:
106 """Return the intersection of several regions.
108 For internal use by `DataCoordinate.region` only.
110 If no regions are provided, returns `None`.
111 """
112 if len(args) == 0:
113 return None
114 else:
115 result = args[0]
116 for n in range(1, len(args)):
117 result = IntersectionRegion(result, args[n])
118 return result
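# A hedged sketch of how this helper folds regions pairwise; the `Circle`
# construction is an illustrative assumption (any `lsst.sphgeom.Region`
# works):
#
#     >>> from lsst.sphgeom import Angle, Circle, UnitVector3d
#     >>> a = Circle(UnitVector3d(1.0, 0.0, 0.0), Angle.fromDegrees(1.0))
#     >>> b = Circle(UnitVector3d(1.0, 0.1, 0.0), Angle.fromDegrees(1.0))
#     >>> overlap = _intersectRegions(a, b)  # IntersectionRegion(a, b)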
121class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
122 """Data ID dictionary.
124 An immutable data ID dictionary that guarantees that its key-value pairs
125 identify at least all required dimensions in a `DimensionGraph`.
127 `DataCoordinate` itself is an ABC, but provides `staticmethod` factory
128 functions for private concrete implementations that should be sufficient
129 for most purposes. `standardize` is the most flexible and safe of these;
130 the others (`makeEmpty`, `fromRequiredValues`, and `fromFullValues`) are
131 more specialized and perform little or no checking of inputs.
133 Notes
134 -----
135 Like any data ID class, `DataCoordinate` behaves like a dictionary, but
136 with some subtleties:
138 - Both `Dimension` instances and `str` names thereof may be used as keys
139 in lookup operations, but iteration (and `keys`) will yield `Dimension`
140 instances. The `names` property can be used to obtain the corresponding
141 `str` names.
143 - Lookups for implied dimensions (those in ``self.graph.implied``) are
144 supported if and only if `hasFull` returns `True`, and are never
145 included in iteration or `keys`. The `full` property may be used to
146 obtain a mapping whose keys do include implied dimensions.
148 - Equality comparison with other mappings is supported, but it always
149 considers only required dimensions (as well as requiring both operands
150 to identify the same dimensions). This is not quite consistent with the
151 way mappings usually work - normally differing keys imply unequal
152 mappings - but it makes sense in this context because data IDs with the
153 same values for required dimensions but different values for implied
154 dimensions represent a serious problem with the data that
155 `DataCoordinate` cannot generally recognize on its own, and a data ID
156 that knows implied dimension values should still be able to compare as
157 equal to one that does not. This is of course not the way comparisons
158 between simple `dict` data IDs work, and hence using a `DataCoordinate`
159 instance for at least one operand in any data ID comparison is strongly
160 recommended.
162 See Also
163 --------
164 :ref:`lsst.daf.butler-dimensions_data_ids`
165 """
167 __slots__ = ()
169 _serializedType = SerializedDataCoordinate
171 @staticmethod
172 def standardize(
173 mapping: NameLookupMapping[Dimension, DataIdValue] | None = None,
174 *,
175 graph: DimensionGraph | None = None,
176 universe: DimensionUniverse | None = None,
177 defaults: DataCoordinate | None = None,
178 **kwargs: Any,
179 ) -> DataCoordinate:
180 """Standardize the supplied dataId.
182 Adapts an arbitrary mapping and/or additional arguments into a true
183 `DataCoordinate`, or augments an existing one.
185 Parameters
186 ----------
187 mapping : `~collections.abc.Mapping`, optional
188 An informal data ID that maps dimensions or dimension names to
189 their primary key values (may also be a true `DataCoordinate`).
190 graph : `DimensionGraph`, optional
191 The dimensions to be identified by the new `DataCoordinate`.
192 If not provided, will be inferred from the keys of ``mapping`` and
193 ``**kwargs``, and ``universe`` must be provided unless ``mapping``
194 is already a `DataCoordinate`.
195 universe : `DimensionUniverse`, optional
196 All known dimensions and their relationships; used to expand
197 and validate dependencies when ``graph`` is not provided.
198 defaults : `DataCoordinate`, optional
199 Default dimension key-value pairs to use when needed. These are
200 never used to infer ``graph``, and are ignored if a different value
201 is provided for the same key in ``mapping`` or ``**kwargs``.
202 **kwargs
203 Additional keyword arguments are treated like additional key-value
204 pairs in ``mapping``.
206 Returns
207 -------
208 coordinate : `DataCoordinate`
209 A validated `DataCoordinate` instance.
211 Raises
212 ------
213 TypeError
214 Raised if the set of optional arguments provided is not supported.
215 KeyError
216 Raised if a key-value pair for a required dimension is missing.
217 """
218 d: dict[str, DataIdValue] = {}
219 if isinstance(mapping, DataCoordinate):
220 if graph is None:
221 if not kwargs:
222 # Already standardized to exactly what we want.
223 return mapping
224 elif kwargs.keys().isdisjoint(graph.dimensions.names):
225 # User provided kwargs, but told us not to use them by
226 # passing in dimensions that are disjoint from those kwargs.
227 # This is not necessarily user error - it's a useful pattern
228 # to pass in all of the key-value pairs you have and let the
229 # code here pull out only what it needs.
230 return mapping.subset(graph)
231 assert universe is None or universe == mapping.universe
232 universe = mapping.universe
233 d.update((name, mapping[name]) for name in mapping.graph.required.names)
234 if mapping.hasFull():
235 d.update((name, mapping[name]) for name in mapping.graph.implied.names)
236 elif isinstance(mapping, NamedKeyMapping):
237 d.update(mapping.byName())
238 elif mapping is not None:
239 d.update(mapping)
240 d.update(kwargs)
241 if graph is None:
242 if defaults is not None:
243 universe = defaults.universe
244 elif universe is None:
245 raise TypeError("universe must be provided if graph is not.")
246 graph = DimensionGraph(universe, names=d.keys())
247 if not graph.dimensions:
248 return DataCoordinate.makeEmpty(graph.universe)
249 if defaults is not None:
250 if defaults.hasFull():
251 for k, v in defaults.full.items():
252 d.setdefault(k.name, v)
253 else:
254 for k, v in defaults.items():
255 d.setdefault(k.name, v)
256 if d.keys() >= graph.dimensions.names:
257 values = tuple(d[name] for name in graph._dataCoordinateIndices)
258 else:
259 try:
260 values = tuple(d[name] for name in graph.required.names)
261 except KeyError as err:
262 raise KeyError(f"No value in data ID ({mapping}) for required dimension {err}.") from err
263 # Some backends cannot handle numpy.int64 type which is a subclass of
264 # numbers.Integral; convert that to int.
265 values = tuple(
266 int(val) if isinstance(val, numbers.Integral) else val for val in values # type: ignore
267 )
268 return _BasicTupleDataCoordinate(graph, values)
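# A hedged usage sketch; the repository path and the "instrument"/"detector"
# dimension names are illustrative assumptions:
#
#     >>> from lsst.daf.butler import Butler, DataCoordinate
#     >>> universe = Butler("repo").registry.dimensions
#     >>> data_id = DataCoordinate.standardize(
#     ...     {"instrument": "HSC"}, detector=42, universe=universe
#     ... )
#     >>> sorted(data_id.names)
#     ['detector', 'instrument']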
270 @staticmethod
271 def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
272 """Return an empty `DataCoordinate`.
274 It identifies the null set of dimensions.
276 Parameters
277 ----------
278 universe : `DimensionUniverse`
279 Universe to which this null dimension set belongs.
281 Returns
282 -------
283 dataId : `DataCoordinate`
284 A data ID object that identifies no dimensions. `hasFull` and
285 `hasRecords` are guaranteed to return `True`, because both `full`
286 and `records` are just empty mappings.
287 """
288 return _ExpandedTupleDataCoordinate(universe.empty, (), {})
290 @staticmethod
291 def fromRequiredValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
292 """Construct a `DataCoordinate` from required dimension values.
294 This is a low-level interface with at most assertion-level checking of
295 inputs. Most callers should use `standardize` instead.
297 Parameters
298 ----------
299 graph : `DimensionGraph`
300 Dimensions this data ID will identify.
301 values : `tuple` [ `int` or `str` ]
302 Tuple of primary key values corresponding to ``graph.required``,
303 in that order.
305 Returns
306 -------
307 dataId : `DataCoordinate`
308 A data ID object that identifies the given dimensions.
309 ``dataId.hasFull()`` will return `True` if and only if
310 ``graph.implied`` is empty, and ``dataId.hasRecords()`` will never
311 return `True`.
312 """
313 assert len(graph.required) == len(
314 values
315 ), f"Inconsistency between dimensions {graph.required} and required values {values}."
316 return _BasicTupleDataCoordinate(graph, values)
318 @staticmethod
319 def fromFullValues(graph: DimensionGraph, values: tuple[DataIdValue, ...]) -> DataCoordinate:
320 """Construct a `DataCoordinate` from all dimension values.
322 This is a low-level interface with at most assertion-level checking of
323 inputs. Most callers should use `standardize` instead.
325 Parameters
326 ----------
327 graph : `DimensionGraph`
328 Dimensions this data ID will identify.
329 values : `tuple` [ `int` or `str` ]
330 Tuple of primary key values corresponding to
331 ``itertools.chain(graph.required, graph.implied)``, in that order.
332 Note that this is _not_ the same order as ``graph.dimensions``,
333 though these contain the same elements.
335 Returns
336 -------
337 dataId : `DataCoordinate`
338 A data ID object that identifies the given dimensions.
339 ``dataId.hasFull()`` will always return `True`, because values for
340 all dimensions are provided, and ``dataId.hasRecords()`` will never
341 return `True`.
342 """
343 assert len(graph.dimensions) == len(
344 values
345 ), f"Inconsistency between dimensions {graph.dimensions} and full values {values}."
346 return _BasicTupleDataCoordinate(graph, values)
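# A hedged sketch contrasting the two low-level constructors; the dimension
# and value choices are illustrative assumptions. Note the required-then-
# implied value ordering expected by `fromFullValues`:
#
#     >>> graph = universe.extract(["physical_filter"])
#     >>> # required = {instrument, physical_filter}; implied = {band}
#     >>> a = DataCoordinate.fromRequiredValues(graph, ("HSC", "HSC-G"))
#     >>> a.hasFull()
#     False
#     >>> b = DataCoordinate.fromFullValues(graph, ("HSC", "HSC-G", "g"))
#     >>> b.hasFull()
#     True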
348 def __hash__(self) -> int:
349 return hash((self.graph,) + self.values_tuple())
351 def __eq__(self, other: Any) -> bool:
352 if not isinstance(other, DataCoordinate):
353 other = DataCoordinate.standardize(other, universe=self.universe)
354 return self.graph == other.graph and self.values_tuple() == other.values_tuple()
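# A hedged illustration of the equality semantics described in the class
# notes: only required-dimension values participate, and a plain-mapping
# operand is standardized first (names and values are assumptions):
#
#     >>> a = DataCoordinate.standardize(
#     ...     {"instrument": "HSC", "physical_filter": "HSC-G"},
#     ...     universe=universe,
#     ... )
#     >>> a == {"instrument": "HSC", "physical_filter": "HSC-G"}
#     True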
356 def __repr__(self) -> str:
357 # We can't make repr yield something that could be exec'd here without
358 # printing out the whole DimensionUniverse the graph is derived from.
359 # So we print something that mostly looks like a dict, but doesn't
360 # quote its keys: that's both more compact and something that can't
361 # be mistaken for an actual dict or something that could be exec'd.
362 terms = [f"{d}: {self[d]!r}" for d in self.graph.required.names]
363 if self.hasFull() and self.graph.required != self.graph.dimensions:
364 terms.append("...")
365 return "{{{}}}".format(", ".join(terms))
367 def __lt__(self, other: Any) -> bool:
368 # Allow DataCoordinate to be sorted
369 if not isinstance(other, type(self)):
370 return NotImplemented
371 # Form tuple of tuples for each DataCoordinate:
372 # Unlike repr() we only use required keys here to ensure that
373 # __eq__ cannot be true simultaneously with __lt__ being true.
374 self_kv = tuple(self.items())
375 other_kv = tuple(other.items())
377 return self_kv < other_kv
379 def __iter__(self) -> Iterator[Dimension]:
380 return iter(self.keys())
382 def __len__(self) -> int:
383 return len(self.keys())
385 def keys(self) -> NamedValueAbstractSet[Dimension]: # type: ignore
386 return self.graph.required
388 @property
389 def names(self) -> Set[str]:
390 """Names of the required dimensions identified by this data ID.
392 They are returned in the same order as `keys`
393 (`collections.abc.Set` [ `str` ]).
394 """
395 return self.keys().names
397 @abstractmethod
398 def subset(self, graph: DimensionGraph) -> DataCoordinate:
399 """Return a `DataCoordinate` whose graph is a subset of ``self.graph``.
401 Parameters
402 ----------
403 graph : `DimensionGraph`
404 The dimensions identified by the returned `DataCoordinate`.
406 Returns
407 -------
408 coordinate : `DataCoordinate`
409 A `DataCoordinate` instance that identifies only the given
410 dimensions. May be ``self`` if ``graph == self.graph``.
412 Raises
413 ------
414 KeyError
415 Raised if the primary key value for one or more required dimensions
416 is unknown. This may happen if ``graph.issubset(self.graph)`` is
417 `False`, or even if ``graph.issubset(self.graph)`` is `True`, if
418 ``self.hasFull()`` is `False` and
419 ``graph.required.issubset(self.graph.required)`` is `False`. As
420 an example of the latter case, consider trying to go from a data ID
421 with dimensions {instrument, physical_filter, band} to
422 just {instrument, band}; band is implied by
423 physical_filter and hence would have no value in the original data
424 ID if ``self.hasFull()`` is `False`.
426 Notes
427 -----
428 If `hasFull` and `hasRecords` return `True` on ``self``, they will
429 return `True` (respectively) on the returned `DataCoordinate` as well.
430 The converse does not hold.
431 """
432 raise NotImplementedError()
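# A hedged sketch of narrowing a data ID; the dimension names are
# illustrative assumptions, with `universe.extract` building the target
# `DimensionGraph`:
#
#     >>> narrow = data_id.subset(universe.extract(["instrument"]))
#     >>> sorted(narrow.names)
#     ['instrument']
#     >>> # Subsetting {instrument, physical_filter, band} down to
#     >>> # {instrument, band} raises KeyError when hasFull() is False,
#     >>> # because the implied band value is unknown.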
434 @abstractmethod
435 def union(self, other: DataCoordinate) -> DataCoordinate:
436 """Combine two data IDs.
438 Returns a new data ID that identifies all dimensions that either of
439 them identifies.
441 Parameters
442 ----------
443 other : `DataCoordinate`
444 Data ID to combine with ``self``.
446 Returns
447 -------
448 unioned : `DataCoordinate`
449 A `DataCoordinate` instance that satisfies
450 ``unioned.graph == self.graph.union(other.graph)``. Will preserve
451 ``hasFull`` and ``hasRecords`` whenever possible.
453 Notes
454 -----
455 No checking for consistency is performed on values for keys that
456 ``self`` and ``other`` have in common, and which value is included in
457 the returned data ID is not specified.
458 """
459 raise NotImplementedError()
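# A hedged sketch; `det_id` and `visit_id` are hypothetical data IDs over
# {instrument, detector} and {instrument, visit} respectively:
#
#     >>> merged = det_id.union(visit_id)
#     >>> merged.graph == det_id.graph.union(visit_id.graph)
#     True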
461 @abstractmethod
462 def expanded(
463 self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
464 ) -> DataCoordinate:
465 """Return a `DataCoordinate` that holds the given records.
467 Guarantees that `hasRecords` returns `True`.
469 This is a low-level interface with at most assertion-level checking of
470 inputs. Most callers should use `Registry.expandDataId` instead.
472 Parameters
473 ----------
474 records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or \
475 `None` ]
476 A `NamedKeyMapping` with `DimensionElement` keys or a regular
477 `~collections.abc.Mapping` with `str` (`DimensionElement` name)
478 keys and `DimensionRecord` values. Keys must cover all elements in
479 ``self.graph.elements``. Values may be `None`, but only to reflect
480 actual NULL values in the database, not just records that have not
481 been fetched.
482 """
483 raise NotImplementedError()
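# Most code should let the registry attach records rather than calling this
# directly; a hedged sketch assuming a hypothetical `butler`:
#
#     >>> expanded = butler.registry.expandDataId(data_id)
#     >>> expanded.hasRecords()
#     True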
485 @property
486 def universe(self) -> DimensionUniverse:
487 """Universe that defines all known compatible dimensions.
489 The universe will be compatible with this coordinate
490 (`DimensionUniverse`).
491 """
492 return self.graph.universe
494 @property
495 @abstractmethod
496 def graph(self) -> DimensionGraph:
497 """Dimensions identified by this data ID (`DimensionGraph`).
499 Note that values are only required to be present for dimensions in
500 ``self.graph.required``; all others may be retrieved (from a
501 `Registry`) given these.
502 """
503 raise NotImplementedError()
505 @abstractmethod
506 def hasFull(self) -> bool:
507 """Whether this data ID contains implied and required values.
509 Returns
510 -------
511 state : `bool`
512 If `True`, `__getitem__`, `get`, and `__contains__` (but not
513 `keys`!) will act as though the mapping includes key-value pairs
514 for implied dimensions, and the `full` property may be used. If
515 `False`, these operations only include key-value pairs for required
516 dimensions, and accessing `full` is an error. Always `True` if
517 there are no implied dimensions.
518 """
519 raise NotImplementedError()
521 @property
522 @abstractmethod
523 def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
524 """Return mapping for all dimensions in ``self.graph``.
526 The mapping includes key-value pairs for implied dimensions as well
527 as required ones (`NamedKeyMapping`).
529 Accessing this attribute if `hasFull` returns `False` is a logic error
530 that may raise an exception of unspecified type either immediately or
531 when implied keys are accessed via the returned mapping, depending on
532 the implementation and whether assertions are enabled.
533 """
534 raise NotImplementedError()
536 @abstractmethod
537 def values_tuple(self) -> tuple[DataIdValue, ...]:
538 """Return the required values (only) of this data ID as a tuple.
540 In contexts where all data IDs have the same dimensions, comparing and
541 hashing these tuples can be *much* faster than comparing the original
542 `DataCoordinate` instances.
543 """
544 raise NotImplementedError()
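# A hedged sketch of the fast path described above, assuming every element
# of the hypothetical `data_ids` has the same dimensions:
#
#     >>> seen = {d.values_tuple() for d in data_ids}  # cheap hashing
#     >>> ordered = sorted(data_ids, key=lambda d: d.values_tuple())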
546 @abstractmethod
547 def hasRecords(self) -> bool:
548 """Whether this data ID contains records.
550 These are the records for all of the dimension elements it identifies.
552 Returns
553 -------
554 state : `bool`
555 If `True`, the following attributes may be accessed:
557 - `records`
558 - `region`
559 - `timespan`
560 - `pack`
562 If `False`, accessing any of these is considered a logic error.
563 """
564 raise NotImplementedError()
566 @property
567 def records(self) -> NamedKeyMapping[DimensionElement, DimensionRecord | None]:
568 """Return the records.
570 Returns a mapping that contains `DimensionRecord` objects for all
571 elements identified by this data ID (`NamedKeyMapping`).
573 The values of this mapping may be `None` if and only if there is no
574 record for that element with these dimensions in the database (which
575 means some foreign key field must have a NULL value).
577 Accessing this attribute if `hasRecords` returns `False` is a logic
578 error that may raise an exception of unspecified type either
579 immediately or when the returned mapping is used, depending on the
580 implementation and whether assertions are enabled.
581 """
582 assert self.hasRecords(), "records may only be accessed if hasRecords() returns True."
583 return _DataCoordinateRecordsView(self)
585 @abstractmethod
586 def _record(self, name: str) -> DimensionRecord | None:
587 """Protected implementation hook that backs the ``records`` attribute.
589 Parameters
590 ----------
591 name : `str`
592 The name of a `DimensionElement`, guaranteed to be in
593 ``self.graph.elements.names``.
595 Returns
596 -------
597 record : `DimensionRecord` or `None`
598 The dimension record for the given element identified by this
599 data ID, or `None` if there is no such record.
600 """
601 raise NotImplementedError()
603 @property
604 def region(self) -> Region | None:
605 """Spatial region associated with this data ID.
607 (`lsst.sphgeom.Region` or `None`).
609 This is `None` if ``self.graph.spatial`` is empty or any needed region is NULL in the database.
611 Accessing this attribute if `hasRecords` returns `False` is a logic
612 error that may or may not raise an exception, depending on the
613 implementation and whether assertions are enabled.
614 """
615 assert self.hasRecords(), "region may only be accessed if hasRecords() returns True."
616 regions = []
617 for family in self.graph.spatial:
618 element = family.choose(self.graph.elements)
619 record = self._record(element.name)
620 if record is None or record.region is None:
621 return None
622 else:
623 regions.append(record.region)
624 return _intersectRegions(*regions)
626 @property
627 def timespan(self) -> Timespan | None:
628 """Temporal interval associated with this data ID.
630 (`Timespan` or `None`).
632 This is `None` if ``self.graph.temporal`` is empty or any needed timespan is NULL in the database.
634 Accessing this attribute if `hasRecords` returns `False` is a logic
635 error that may or may not raise an exception, depending on the
636 implementation and whether assertions are enabled.
637 """
638 assert self.hasRecords(), "timespan may only be accessed if hasRecords() returns True."
639 timespans = []
640 for family in self.graph.temporal:
641 element = family.choose(self.graph.elements)
642 record = self._record(element.name)
643 # DimensionRecord subclasses for temporal elements always have
644 # .timespan, but they're dynamic so this can't be type-checked.
645 if record is None or record.timespan is None:
646 return None
647 else:
648 timespans.append(record.timespan)
649 if not timespans:
650 return None
651 elif len(timespans) == 1:
652 return timespans[0]
653 else:
654 return Timespan.intersection(*timespans)
656 @overload
657 def pack(self, name: str, *, returnMaxBits: Literal[True]) -> tuple[int, int]:
658 ...
660 @overload
661 def pack(self, name: str, *, returnMaxBits: Literal[False]) -> int:
662 ...
664 # TODO: Remove this method and its overloads above on DM-38687.
665 @deprecated(
666 "Deprecated in favor of configurable dimension packers. Will be removed after v26.",
667 version="v26",
668 category=FutureWarning,
669 )
670 def pack(self, name: str, *, returnMaxBits: bool = False) -> tuple[int, int] | int:
671 """Pack this data ID into an integer.
673 Parameters
674 ----------
675 name : `str`
676 Name of the `DimensionPacker` algorithm (as defined in the
677 dimension configuration).
678 returnMaxBits : `bool`, optional
679 If `True` (default is `False`), also return the maximum number of
680 nonzero bits in the returned integer across all data IDs.
682 Returns
683 -------
684 packed : `int`
685 Integer ID. This ID is unique only across data IDs that have
686 the same values for the packer's "fixed" dimensions.
687 maxBits : `int`, optional
688 Maximum number of nonzero bits in ``packed``. Not returned unless
689 ``returnMaxBits`` is `True`.
691 Notes
692 -----
693 Calling this method if `hasRecords` returns `False` is a logic
694 error that may or may not raise an exception, depending on the
695 implementation and whether assertions are enabled.
696 """
697 assert self.hasRecords(), "pack() may only be called if hasRecords() returns True."
698 return self.universe.makePacker(name, self).pack(self, returnMaxBits=returnMaxBits)
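# A hedged sketch for existing callers; the "visit_detector" packer name is
# an illustrative assumption, and new code should prefer the configurable
# dimension packers mentioned in the deprecation notice:
#
#     >>> packed, max_bits = expanded_id.pack("visit_detector",
#     ...                                     returnMaxBits=True)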
700 def to_simple(self, minimal: bool = False) -> SerializedDataCoordinate:
701 """Convert this class to a simple python type.
703 This is suitable for serialization.
705 Parameters
706 ----------
707 minimal : `bool`, optional
708 Use minimal serialization. If set the records will not be attached.
710 Returns
711 -------
712 simple : `SerializedDataCoordinate`
713 The object converted to simple form.
714 """
715 # Convert to a dict form
716 if self.hasFull():
717 dataId = self.full.byName()
718 else:
719 dataId = self.byName()
720 records: dict[str, SerializedDimensionRecord] | None
721 if not minimal and self.hasRecords():
722 records = {k: v.to_simple() for k, v in self.records.byName().items() if v is not None}
723 else:
724 records = None
726 return SerializedDataCoordinate(dataId=dataId, records=records)
728 @classmethod
729 def from_simple(
730 cls,
731 simple: SerializedDataCoordinate,
732 universe: DimensionUniverse | None = None,
733 registry: Registry | None = None,
734 ) -> DataCoordinate:
735 """Construct a new object from the simplified form.
737 The data is assumed to be of the form returned from the `to_simple`
738 method.
740 Parameters
741 ----------
742 simple : `SerializedDataCoordinate`
743 The object returned by `to_simple()`.
744 universe : `DimensionUniverse`, optional
745 All known dimensions and their relationships. Can be `None` if ``registry`` is provided.
746 registry : `lsst.daf.butler.Registry`, optional
747 Registry from which a universe can be extracted. Can be `None`
748 if universe is provided explicitly.
750 Returns
751 -------
752 dataId : `DataCoordinate`
753 Newly-constructed object.
754 """
755 key = (frozenset(simple.dataId.items()), simple.records is not None)
756 cache = PersistenceContextVars.dataCoordinates.get()
757 if cache is not None and (result := cache.get(key)) is not None:
758 return result
759 if universe is None and registry is None:
760 raise ValueError("One of universe or registry is required to convert a dict to a DataCoordinate")
761 if universe is None and registry is not None:
762 universe = registry.dimensions
763 if universe is None:
764 # this is for mypy
765 raise ValueError("Unable to determine a usable universe")
767 dataId = cls.standardize(simple.dataId, universe=universe)
768 if simple.records:
769 dataId = dataId.expanded(
770 {k: DimensionRecord.from_simple(v, universe=universe) for k, v in simple.records.items()}
771 )
772 if cache is not None:
773 cache[key] = dataId
774 return dataId
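# A hedged round-trip sketch through the serialization layer:
#
#     >>> simple = data_id.to_simple(minimal=True)  # records not attached
#     >>> restored = DataCoordinate.from_simple(simple, universe=universe)
#     >>> restored == data_id
#     True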
776 to_json = to_json_pydantic
777 from_json: ClassVar = classmethod(from_json_pydantic)
780DataId = DataCoordinate | Mapping[str, Any]
781"""A type-annotation alias for signatures that accept both informal data ID
782dictionaries and validated `DataCoordinate` instances.
783"""
786class _DataCoordinateFullView(NamedKeyMapping[Dimension, DataIdValue]):
787 """View class for `DataCoordinate.full`.
789 Provides the default implementation for
790 `DataCoordinate.full`.
792 Parameters
793 ----------
794 target : `DataCoordinate`
795 The `DataCoordinate` instance this object provides a view of.
796 """
798 def __init__(self, target: _BasicTupleDataCoordinate):
799 self._target = target
801 __slots__ = ("_target",)
803 def __repr__(self) -> str:
804 terms = [f"{d}: {self[d]!r}" for d in self._target.graph.dimensions.names]
805 return "{{{}}}".format(", ".join(terms))
807 def __getitem__(self, key: DataIdKey) -> DataIdValue:
808 return self._target[key]
810 def __iter__(self) -> Iterator[Dimension]:
811 return iter(self.keys())
813 def __len__(self) -> int:
814 return len(self.keys())
816 def keys(self) -> NamedValueAbstractSet[Dimension]: # type: ignore
817 return self._target.graph.dimensions
819 @property
820 def names(self) -> Set[str]:
821 # Docstring inherited from `NamedKeyMapping`.
822 return self.keys().names
825class _DataCoordinateRecordsView(NamedKeyMapping[DimensionElement, DimensionRecord | None]):
826 """View class for `DataCoordinate.records`.
828 Provides the default implementation for
829 `DataCoordinate.records`.
831 Parameters
832 ----------
833 target : `DataCoordinate`
834 The `DataCoordinate` instance this object provides a view of.
835 """
837 def __init__(self, target: DataCoordinate):
838 self._target = target
840 __slots__ = ("_target",)
842 def __repr__(self) -> str:
843 terms = [f"{d}: {self[d]!r}" for d in self._target.graph.elements.names]
844 return "{{{}}}".format(", ".join(terms))
846 def __str__(self) -> str:
847 return "\n".join(str(v) for v in self.values())
849 def __getitem__(self, key: DimensionElement | str) -> DimensionRecord | None:
850 if isinstance(key, DimensionElement):
851 key = key.name
852 return self._target._record(key)
854 def __iter__(self) -> Iterator[DimensionElement]:
855 return iter(self.keys())
857 def __len__(self) -> int:
858 return len(self.keys())
860 def keys(self) -> NamedValueAbstractSet[DimensionElement]: # type: ignore
861 return self._target.graph.elements
863 @property
864 def names(self) -> Set[str]:
865 # Docstring inherited from `NamedKeyMapping`.
866 return self.keys().names
869class _BasicTupleDataCoordinate(DataCoordinate):
870 """Standard implementation of `DataCoordinate`.
872 Backed by a tuple of values.
874 This class should only be accessed outside this module via the
875 `DataCoordinate` interface, and should only be constructed via the static
876 methods there.
878 Parameters
879 ----------
880 graph : `DimensionGraph`
881 The dimensions to be identified.
882 values : `tuple` [ `int` or `str` ]
883 Data ID values, ordered to match ``graph._dataCoordinateIndices``. May
884 include values for just required dimensions (which always come first)
885 or all dimensions.
886 """
888 def __init__(self, graph: DimensionGraph, values: tuple[DataIdValue, ...]):
889 self._graph = graph
890 self._values = values
892 __slots__ = ("_graph", "_values")
894 @property
895 def graph(self) -> DimensionGraph:
896 # Docstring inherited from DataCoordinate.
897 return self._graph
899 def __getitem__(self, key: DataIdKey) -> DataIdValue:
900 # Docstring inherited from DataCoordinate.
901 if isinstance(key, Dimension):
902 key = key.name
903 index = self._graph._dataCoordinateIndices[key]
904 try:
905 return self._values[index]
906 except IndexError:
907 # Caller asked for an implied dimension, but this object only has
908 # values for the required ones.
909 raise KeyError(key) from None
911 def byName(self) -> dict[str, DataIdValue]:
912 # Docstring inheritance.
913 # Reimplementation is for optimization; `values_tuple()` is much faster
914 # to iterate over than values() because it doesn't go through
915 # `__getitem__`.
916 return dict(zip(self.names, self.values_tuple(), strict=True))
918 def subset(self, graph: DimensionGraph) -> DataCoordinate:
919 # Docstring inherited from DataCoordinate.
920 if self._graph == graph:
921 return self
922 elif self.hasFull() or self._graph.required >= graph.dimensions:
923 return _BasicTupleDataCoordinate(
924 graph,
925 tuple(self[k] for k in graph._dataCoordinateIndices),
926 )
927 else:
928 return _BasicTupleDataCoordinate(graph, tuple(self[k] for k in graph.required.names))
930 def union(self, other: DataCoordinate) -> DataCoordinate:
931 # Docstring inherited from DataCoordinate.
932 graph = self.graph.union(other.graph)
933 # See if one or both input data IDs is already what we want to return;
934 # if so, return the most complete one we have.
935 if other.graph == graph:
936 if self.graph == graph:
937 # Input data IDs have the same graph (which is also the result
938 # graph), but may not have the same content.
939 # other might have records; self does not, so try other first.
940 # If it at least has full values, it's no worse than self.
941 if other.hasFull():
942 return other
943 else:
944 return self
945 elif other.hasFull():
946 return other
947 # There's some chance that neither self nor other has full values,
948 # but together they provide enough values for the union. Let the
949 # general case below handle that.
950 elif self.graph == graph and self.hasFull():
951 # No chance at returning records. If self has full values, it's
952 # the best we can do.
953 return self
954 # General case with actual merging of dictionaries.
955 values = self.full.byName() if self.hasFull() else self.byName()
956 values.update(other.full.byName() if other.hasFull() else other.byName())
957 return DataCoordinate.standardize(values, graph=graph)
959 @property
960 def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
961 # Docstring inherited.
962 assert self.hasFull(), "full may only be accessed if hasFull() returns True."
963 return _DataCoordinateFullView(self)
965 def expanded(
966 self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
967 ) -> DataCoordinate:
968 # Docstring inherited from DataCoordinate
969 values = self._values
970 if not self.hasFull():
971 # Extract a complete values tuple from the attributes of the given
972 # records. It's possible for these to be inconsistent with
973 # self._values (which is a serious problem, of course), but we've
974 # documented this as a no-checking API.
975 values += tuple(getattr(records[d.name], d.primaryKey.name) for d in self._graph.implied)
976 return _ExpandedTupleDataCoordinate(self._graph, values, records)
978 def hasFull(self) -> bool:
979 # Docstring inherited from DataCoordinate.
980 return len(self._values) == len(self._graph._dataCoordinateIndices)
982 def hasRecords(self) -> bool:
983 # Docstring inherited from DataCoordinate.
984 return False
986 def values_tuple(self) -> tuple[DataIdValue, ...]:
987 # Docstring inherited from DataCoordinate.
988 return self._values[: len(self._graph.required)]
990 def _record(self, name: str) -> DimensionRecord | None:
991 # Docstring inherited from DataCoordinate.
992 raise AssertionError()
994 def __reduce__(self) -> tuple[Any, ...]:
995 return (_BasicTupleDataCoordinate, (self._graph, self._values))
997 def __getattr__(self, name: str) -> Any:
998 if name in self.graph.elements.names:
999 raise AttributeError(
1000 f"Dimension record attribute {name!r} is only available on expanded DataCoordinates."
1001 )
1002 raise AttributeError(name)
1005class _ExpandedTupleDataCoordinate(_BasicTupleDataCoordinate):
1006 """A `DataCoordinate` implementation that can hold `DimensionRecord`.
1008 This class should only be accessed outside this module via the
1009 `DataCoordinate` interface, and should only be constructed via calls to
1010 `DataCoordinate.expanded`.
1012 Parameters
1013 ----------
1014 graph : `DimensionGraph`
1015 The dimensions to be identified.
1016 values : `tuple` [ `int` or `str` ]
1017 Data ID values, ordered to match ``graph._dataCoordinateIndices``.
1018 May include values for just required dimensions (which always come
1019 first) or all dimensions.
1020 records : `~collections.abc.Mapping` [ `str`, `DimensionRecord` or `None` ]
1021 A `NamedKeyMapping` with `DimensionElement` keys or a regular
1022 `~collections.abc.Mapping` with `str` (`DimensionElement` name) keys
1023 and `DimensionRecord` values. Keys must cover all elements in
1024 ``self.graph.elements``. Values may be `None`, but only to reflect
1025 actual NULL values in the database, not just records that have not
1026 been fetched.
1027 """
1029 def __init__(
1030 self,
1031 graph: DimensionGraph,
1032 values: tuple[DataIdValue, ...],
1033 records: NameLookupMapping[DimensionElement, DimensionRecord | None],
1034 ):
1035 super().__init__(graph, values)
1036 assert super().hasFull(), "This implementation requires full dimension values."
1037 self._records = records
1039 __slots__ = ("_records",)
1041 def subset(self, graph: DimensionGraph) -> DataCoordinate:
1042 # Docstring inherited from DataCoordinate.
1043 if self._graph == graph:
1044 return self
1045 return _ExpandedTupleDataCoordinate(
1046 graph, tuple(self[k] for k in graph._dataCoordinateIndices), records=self._records
1047 )
1049 def expanded(
1050 self, records: NameLookupMapping[DimensionElement, DimensionRecord | None]
1051 ) -> DataCoordinate:
1052 # Docstring inherited from DataCoordinate.
1053 return self
1055 def union(self, other: DataCoordinate) -> DataCoordinate:
1056 # Docstring inherited from DataCoordinate.
1057 graph = self.graph.union(other.graph)
1058 # See if one or both input data IDs is already what we want to return;
1059 # if so, return the most complete one we have.
1060 if self.graph == graph:
1061 # self has records, so even if other is also a valid result, it's
1062 # no better.
1063 return self
1064 if other.graph == graph and other.hasFull():
1065 # If other has full values, and self does not identify some of
1066 # those, it's the best we can do. It may have records, too.
1067 return other
1068 # If other does not have full values, there's a chance self may
1069 # provide the values needed to complete it. For example, self
1070 # could be {band} while other could be
1071 # {instrument, physical_filter, band}, with band unknown.
1072 # General case with actual merging of dictionaries.
1073 values = self.full.byName()
1074 values.update(other.full.byName() if other.hasFull() else other.byName())
1075 basic = DataCoordinate.standardize(values, graph=graph)
1076 # See if we can add records.
1077 if self.hasRecords() and other.hasRecords():
1078 # Sometimes the elements of a union of graphs can contain elements
1079 # that weren't in either input graph (because graph unions are only
1080 # on dimensions). e.g. {visit} | {detector} brings along
1081 # visit_detector_region.
1082 elements = set(graph.elements.names)
1083 elements -= self.graph.elements.names
1084 elements -= other.graph.elements.names
1085 if not elements:
1086 records = NamedKeyDict[DimensionElement, DimensionRecord | None](self.records)
1087 records.update(other.records)
1088 return basic.expanded(records.freeze())
1089 return basic
1091 def hasFull(self) -> bool:
1092 # Docstring inherited from DataCoordinate.
1093 return True
1095 def hasRecords(self) -> bool:
1096 # Docstring inherited from DataCoordinate.
1097 return True
1099 def _record(self, name: str) -> DimensionRecord | None:
1100 # Docstring inherited from DataCoordinate.
1101 return self._records[name]
1103 def __reduce__(self) -> tuple[Any, ...]:
1104 return (_ExpandedTupleDataCoordinate, (self._graph, self._values, self._records))
1106 def __getattr__(self, name: str) -> Any:
1107 try:
1108 return self._record(name)
1109 except KeyError:
1110 raise AttributeError(name) from None
1112 def __dir__(self) -> list[str]:
1113 result = list(super().__dir__())
1114 result.extend(self.graph.elements.names)
1115 return result