Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22# 

23# Design notes for this module are in 

24# doc/lsst.daf.butler/dev/dataCoordinate.py. 

25# 

26 

27from __future__ import annotations 

28 

29__all__ = ("DataCoordinate", "DataId") 

30 

31from abc import abstractmethod 

32import numbers 

33from typing import ( 

34 AbstractSet, 

35 Any, 

36 Dict, 

37 Iterator, 

38 Mapping, 

39 Optional, 

40 Tuple, 

41 TYPE_CHECKING, 

42 Union, 

43) 

44 

45from lsst.sphgeom import Region 

46from ..named import NamedKeyMapping, NameLookupMapping, NamedValueSet 

47from ..timespan import Timespan 

48from .elements import Dimension, DimensionElement 

49from .graph import DimensionGraph 

50from .records import DimensionRecord 

51 

if TYPE_CHECKING:  # Imports needed only for type annotations; may be circular.
    from .universe import DimensionUniverse

54 

DataIdKey = Union[str, Dimension]
"""Type annotation alias for the keys that can be used to index a
DataCoordinate.
"""

59 

DataIdValue = Union[str, int, None]
"""Type annotation alias for the values that can be present in a
DataCoordinate or other data ID.
"""

64 

65 

66def _intersectRegions(*args: Region) -> Optional[Region]: 

67 """Return the intersection of several regions. 

68 

69 For internal use by `ExpandedDataCoordinate` only. 

70 

71 If no regions are provided, returns `None`. 

72 

73 This is currently a placeholder; it actually returns `NotImplemented` 

74 (it does *not* raise an exception) when multiple regions are given, which 

75 propagates to `ExpandedDataCoordinate`. This reflects the fact that we 

76 don't want to fail to construct an `ExpandedDataCoordinate` entirely when 

77 we can't compute its region, and at present we don't have a high-level use 

78 case for the regions of these particular data IDs. 

79 """ 

80 if len(args) == 0: 

81 return None 

82 elif len(args) == 1: 

83 return args[0] 

84 else: 

85 return NotImplemented 

86 

87 

class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
    """An immutable data ID dictionary that guarantees that its key-value pairs
    identify at least all required dimensions in a `DimensionGraph`.

    `DataCoordinate` itself is an ABC, but provides `staticmethod` factory
    functions for private concrete implementations that should be sufficient
    for most purposes.  `standardize` is the most flexible and safe of these;
    the others (`makeEmpty`, `fromRequiredValues`, and `fromFullValues`) are
    more specialized and perform little or no checking of inputs.

    Notes
    -----
    Like any data ID class, `DataCoordinate` behaves like a dictionary, but
    with some subtleties:

    - Both `Dimension` instances and `str` names thereof may be used as keys
      in lookup operations, but iteration (and `keys`) will yield `Dimension`
      instances.  The `names` property can be used to obtain the corresponding
      `str` names.

    - Lookups for implied dimensions (those in ``self.graph.implied``) are
      supported if and only if `hasFull` returns `True`, and are never
      included in iteration or `keys`.  The `full` property may be used to
      obtain a mapping whose keys do include implied dimensions.

    - Equality comparison with other mappings is supported, but it always
      considers only required dimensions (as well as requiring both operands
      to identify the same dimensions).  This is not quite consistent with the
      way mappings usually work - normally differing keys imply unequal
      mappings - but it makes sense in this context because data IDs with the
      same values for required dimensions but different values for implied
      dimensions represent a serious problem with the data that
      `DataCoordinate` cannot generally recognize on its own, and a data ID
      that knows implied dimension values should still be able to compare as
      equal to one that does not.  This is of course not the way comparisons
      between simple `dict` data IDs work, and hence using a `DataCoordinate`
      instance for at least one operand in any data ID comparison is strongly
      recommended.
    """

    __slots__ = ()

    @staticmethod
    def standardize(
        mapping: Optional[NameLookupMapping[Dimension, DataIdValue]] = None,
        *,
        graph: Optional[DimensionGraph] = None,
        universe: Optional[DimensionUniverse] = None,
        **kwargs: Any
    ) -> DataCoordinate:
        """Adapt an arbitrary mapping and/or additional arguments into a true
        `DataCoordinate`, or augment an existing one.

        Parameters
        ----------
        mapping : `~collections.abc.Mapping`, optional
            An informal data ID that maps dimensions or dimension names to
            their primary key values (may also be a true `DataCoordinate`).
        graph : `DimensionGraph`
            The dimensions to be identified by the new `DataCoordinate`.
            If not provided, will be inferred from the keys of ``mapping``,
            and ``universe`` must be provided unless ``mapping`` is already a
            `DataCoordinate`.
        universe : `DimensionUniverse`
            All known dimensions and their relationships; used to expand
            and validate dependencies when ``graph`` is not provided.
        **kwargs
            Additional keyword arguments are treated like additional key-value
            pairs in ``mapping``.

        Returns
        -------
        coordinate : `DataCoordinate`
            A validated `DataCoordinate` instance.

        Raises
        ------
        TypeError
            Raised if the set of optional arguments provided is not supported.
        KeyError
            Raised if a key-value pair for a required dimension is missing.
        """
        d: Dict[str, DataIdValue] = {}
        if isinstance(mapping, DataCoordinate):
            if graph is None:
                if not kwargs:
                    # Already standardized to exactly what we want.
                    return mapping
            elif kwargs.keys().isdisjoint(graph.dimensions.names):
                # User provided kwargs, but told us not to use them by
                # passing in dimensions that are disjoint from those kwargs.
                # This is not necessarily user error - it's a useful pattern
                # to pass in all of the key-value pairs you have and let the
                # code here pull out only what it needs.
                return mapping.subset(graph)
            assert universe is None or universe == mapping.universe
            universe = mapping.universe
            d.update((name, mapping[name]) for name in mapping.graph.required.names)
            if mapping.hasFull():
                d.update((name, mapping[name]) for name in mapping.graph.implied.names)
        elif isinstance(mapping, NamedKeyMapping):
            d.update(mapping.byName())
        elif mapping is not None:
            d.update(mapping)
        d.update(kwargs)
        if graph is None:
            if universe is None:
                raise TypeError("universe must be provided if graph is not.")
            graph = DimensionGraph(universe, names=d.keys())
        if not graph.dimensions:
            return DataCoordinate.makeEmpty(graph.universe)
        if d.keys() >= graph.dimensions.names:
            values = tuple(d[name] for name in graph._dataCoordinateIndices.keys())
        else:
            try:
                values = tuple(d[name] for name in graph.required.names)
            except KeyError as err:
                raise KeyError(f"No value in data ID ({mapping}) for required dimension {err}.") from err
        # Some backends cannot handle numpy.int64 type which is a subclass of
        # numbers.Integral; convert that to int.
        values = tuple(int(val) if isinstance(val, numbers.Integral)  # type: ignore
                       else val for val in values)
        return _BasicTupleDataCoordinate(graph, values)

    @staticmethod
    def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate` that identifies the null set of
        dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies no dimensions.  `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        return _ExpandedTupleDataCoordinate(universe.empty, (), {})

    @staticmethod
    def fromRequiredValues(graph: DimensionGraph, values: Tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from a tuple of dimension values that
        identify only required dimensions.

        This is a low-level interface with at most assertion-level checking of
        inputs.  Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to ``graph.required``,
            in that order.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` if and only if
            ``graph.implied`` is empty, and ``dataId.hasRecords()`` will never
            return `True`.
        """
        assert len(graph.required) == len(values), \
            f"Inconsistency between dimensions {graph.required} and required values {values}."
        return _BasicTupleDataCoordinate(graph, values)

    @staticmethod
    def fromFullValues(graph: DimensionGraph, values: Tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from a tuple of dimension values that
        identify all dimensions.

        This is a low-level interface with at most assertion-level checking of
        inputs.  Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(graph.required, graph.implied)``, in that order.
            Note that this is _not_ the same order as ``graph.dimensions``,
            though these contain the same elements.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`, because values
            for all dimensions (required and implied) are provided, while
            ``dataId.hasRecords()`` will never return `True`.
        """
        assert len(graph.dimensions) == len(values), \
            f"Inconsistency between dimensions {graph.dimensions} and full values {values}."
        return _BasicTupleDataCoordinate(graph, values)

    def __hash__(self) -> int:
        # Hash on the graph plus the required-dimension values only, to stay
        # consistent with __eq__ (which also ignores implied values).
        return hash((self.graph,) + tuple(self[d.name] for d in self.graph.required))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, DataCoordinate):
            other = DataCoordinate.standardize(other, universe=self.universe)
        return self.graph == other.graph and all(self[d.name] == other[d.name] for d in self.graph.required)

    def __repr__(self) -> str:
        # We can't make repr yield something that could be exec'd here without
        # printing out the whole DimensionUniverse the graph is derived from.
        # So we print something that mostly looks like a dict, but doesn't
        # quote its keys: that's both more compact and something that can't
        # be mistaken for an actual dict or something that could be exec'd.
        return "{{{}}}".format(
            ', '.join(f"{d}: {self.get(d, '?')}" for d in self.graph.dimensions.names)
        )

    def __lt__(self, other: Any) -> bool:
        # Allow DataCoordinate to be sorted
        # The sort order itself does not matter, just that the order
        # is reproducible.  repr() already includes the keys/values for
        # this coordinate so use that.
        return repr(self) < repr(other)

    def __iter__(self) -> Iterator[Dimension]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueSet[Dimension]:
        # Only required dimensions appear as keys; implied dimensions are
        # reachable via `full` when `hasFull` is True.
        return self.graph.required

    @property
    def names(self) -> AbstractSet[str]:
        """The names of the required dimensions identified by this data ID, in
        the same order as `keys` (`collections.abc.Set` [ `str` ]).
        """
        return self.keys().names

    @abstractmethod
    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        """Return a `DataCoordinate` whose graph is a subset of ``self.graph``.

        Parameters
        ----------
        graph : `DimensionGraph`
            The dimensions identified by the returned `DataCoordinate`.

        Returns
        -------
        coordinate : `DataCoordinate`
            A `DataCoordinate` instance that identifies only the given
            dimensions.  May be ``self`` if ``graph == self.graph``.

        Raises
        ------
        KeyError
            Raised if the primary key value for one or more required dimensions
            is unknown.  This may happen if ``graph.issubset(self.graph)`` is
            `False`, or even if ``graph.issubset(self.graph)`` is `True`, if
            ``self.hasFull()`` is `False` and
            ``graph.required.issubset(self.graph.required)`` is `False`.  As
            an example of the latter case, consider trying to go from a data ID
            with dimensions {instrument, physical_filter, band} to
            just {instrument, band}; band is implied by
            physical_filter and hence would have no value in the original data
            ID if ``self.hasFull()`` is `False`.

        Notes
        -----
        If `hasFull` and `hasRecords` return `True` on ``self``, they will
        return `True` (respectively) on the returned `DataCoordinate` as well.
        The converse does not hold.
        """
        raise NotImplementedError()

    @abstractmethod
    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        """Return a `DataCoordinate` that holds the given records and
        guarantees that `hasRecords` returns `True`.

        This is a low-level interface with at most assertion-level checking of
        inputs.  Most callers should use `Registry.expandDataId` instead.

        Parameters
        ----------
        records : `Mapping` [ `str`, `DimensionRecord` or `None` ]
            A `NamedKeyMapping` with `DimensionElement` keys or a regular
            `Mapping` with `str` (`DimensionElement` name) keys and
            `DimensionRecord` values.  Keys must cover all elements in
            ``self.graph.elements``.  Values may be `None`, but only to reflect
            actual NULL values in the database, not just records that have not
            been fetched.
        """
        raise NotImplementedError()

    @property
    def universe(self) -> DimensionUniverse:
        """The universe that defines all known dimensions compatible with
        this coordinate (`DimensionUniverse`).
        """
        return self.graph.universe

    @property
    @abstractmethod
    def graph(self) -> DimensionGraph:
        """The dimensions identified by this data ID (`DimensionGraph`).

        Note that values are only required to be present for dimensions in
        ``self.graph.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        raise NotImplementedError()

    @abstractmethod
    def hasFull(self) -> bool:
        """Whether this data ID contains values for implied as well as
        required dimensions.

        Returns
        -------
        state : `bool`
            If `True`, `__getitem__`, `get`, and `__contains__` (but not
            `keys`!) will act as though the mapping includes key-value pairs
            for implied dimensions, and the `full` property may be used.  If
            `False`, these operations only include key-value pairs for required
            dimensions, and accessing `full` is an error.  Always `True` if
            there are no implied dimensions.
        """
        raise NotImplementedError()

    @property
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        """A mapping that includes key-value pairs for all dimensions in
        ``self.graph``, including implied (`NamedKeyMapping`).

        Accessing this attribute if `hasFull` returns `False` is a logic error
        that may raise an exception of unspecified type either immediately or
        when implied keys are accessed via the returned mapping, depending on
        the implementation and whether assertions are enabled.
        """
        assert self.hasFull(), "full may only be accessed if hasFull() returns True."
        return _DataCoordinateFullView(self)

    @abstractmethod
    def hasRecords(self) -> bool:
        """Whether this data ID contains records for all of the dimension
        elements it identifies.

        Returns
        -------
        state : `bool`
            If `True`, the following attributes may be accessed:

            - `records`
            - `region`
            - `timespan`
            - `pack`

            If `False`, accessing any of these is considered a logic error.
        """
        raise NotImplementedError()

    @property
    def records(self) -> NamedKeyMapping[DimensionElement, Optional[DimensionRecord]]:
        """A mapping that contains `DimensionRecord` objects for all elements
        identified by this data ID (`NamedKeyMapping`).

        The values of this mapping may be `None` if and only if there is no
        record for that element with these dimensions in the database (which
        means some foreign key field must have a NULL value).

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may raise an exception of unspecified type either
        immediately or when the returned mapping is used, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "records may only be accessed if hasRecords() returns True."
        return _DataCoordinateRecordsView(self)

    @abstractmethod
    def _record(self, name: str) -> Optional[DimensionRecord]:
        """Protected implementation hook that backs the ``records`` attribute.

        Parameters
        ----------
        name : `str`
            The name of a `DimensionElement`, guaranteed to be in
            ``self.graph.elements.names``.

        Returns
        -------
        record : `DimensionRecord` or `None`
            The dimension record for the given element identified by this
            data ID, or `None` if there is no such record.
        """
        raise NotImplementedError()

    @property
    def region(self) -> Optional[Region]:
        """The spatial region associated with this data ID
        (`lsst.sphgeom.Region` or `None`).

        This is `None` if and only if ``self.graph.spatial`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "region may only be accessed if hasRecords() returns True."
        regions = []
        for element in self.graph.spatial:
            record = self._record(element.name)
            # DimensionRecord subclasses for spatial elements always have a
            # .region, but they're dynamic so this can't be type-checked.
            if record is None or record.region is None:  # type: ignore
                return None
            else:
                regions.append(record.region)  # type:ignore
        return _intersectRegions(*regions)

    @property
    def timespan(self) -> Optional[Timespan]:
        """The temporal interval associated with this data ID
        (`Timespan` or `None`).

        This is `None` if and only if ``self.graph.timespan`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "timespan may only be accessed if hasRecords() returns True."
        timespans = []
        for element in self.graph.temporal:
            record = self._record(element.name)
            # DimensionRecord subclasses for temporal elements always have
            # .timespan, but they're dynamic so this can't be type-checked.
            if record is None or record.timespan is None:
                return None
            else:
                timespans.append(record.timespan)
        return Timespan.intersection(*timespans)

    def pack(self, name: str, *, returnMaxBits: bool = False) -> Union[Tuple[int, int], int]:
        """Pack this data ID into an integer.

        Parameters
        ----------
        name : `str`
            Name of the `DimensionPacker` algorithm (as defined in the
            dimension configuration).
        returnMaxBits : `bool`, optional
            If `True` (`False` is default), return the maximum number of
            nonzero bits in the returned integer across all data IDs.

        Returns
        -------
        packed : `int`
            Integer ID.  This ID is unique only across data IDs that have
            the same values for the packer's "fixed" dimensions.
        maxBits : `int`, optional
            Maximum number of nonzero bits in ``packed``.  Not returned unless
            ``returnMaxBits`` is `True`.

        Notes
        -----
        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "pack() may only be called if hasRecords() returns True."
        return self.universe.makePacker(name, self).pack(self, returnMaxBits=returnMaxBits)

565 

566 

DataId = Union[DataCoordinate, Mapping[str, Any]]
"""A type-annotation alias for signatures that accept both informal data ID
dictionaries and validated `DataCoordinate` instances.
"""

571 

572 

class _DataCoordinateFullView(NamedKeyMapping[Dimension, DataIdValue]):
    """View class that provides the default implementation for
    `DataCoordinate.full`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # The target accepts both required and implied dimension keys when it
        # has full values, so lookups can be delegated directly.
        return self._target[key]

    def __iter__(self) -> Iterator[Dimension]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueSet[Dimension]:
        # All dimensions in the graph, required and implied alike.
        return self._target.graph.dimensions

    @property
    def names(self) -> AbstractSet[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names

603 

604 

class _DataCoordinateRecordsView(NamedKeyMapping[DimensionElement, Optional[DimensionRecord]]):
    """View class that provides the default implementation for
    `DataCoordinate.records`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: Union[DimensionElement, str]) -> Optional[DimensionRecord]:
        # Normalize `DimensionElement` keys to their string names before
        # delegating to the target's record lookup hook.
        name = key.name if isinstance(key, DimensionElement) else key
        return self._target._record(name)

    def __iter__(self) -> Iterator[DimensionElement]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueSet[DimensionElement]:
        # One entry per element identified by the target data ID.
        return self._target.graph.elements

    @property
    def names(self) -> AbstractSet[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names

637 

638 

class _BasicTupleDataCoordinate(DataCoordinate):
    """Standard implementation of `DataCoordinate`, backed by a tuple of
    values.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via the static
    methods there.

    Parameters
    ----------
    graph : `DimensionGraph`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match ``graph._dataCoordinateIndices``.  May
        include values for just required dimensions (which always come first)
        or all dimensions.
    """
    def __init__(self, graph: DimensionGraph, values: Tuple[DataIdValue, ...]):
        self._graph = graph
        self._values = values

    __slots__ = ("_graph", "_values")

    @property
    def graph(self) -> DimensionGraph:
        # Docstring inherited from DataCoordinate.
        return self._graph

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Docstring inherited from DataCoordinate.
        if isinstance(key, Dimension):
            key = key.name
        index = self._graph._dataCoordinateIndices[key]
        try:
            return self._values[index]
        except IndexError:
            # Caller asked for an implied dimension, but this object only has
            # values for the required ones.  Suppress the IndexError context
            # so callers see a plain KeyError, consistent with a mapping.
            raise KeyError(key) from None

    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if self._graph == graph:
            return self
        elif self.hasFull() or self._graph.required.issuperset(graph.dimensions):
            # We can provide values for all of the subset graph's dimensions
            # (required and implied), so the result has full values, too.
            return _BasicTupleDataCoordinate(
                graph,
                tuple(self[k] for k in graph._dataCoordinateIndices.keys()),
            )
        else:
            # Only required values are available; a KeyError propagates if
            # even those are missing (documented behavior).
            return _BasicTupleDataCoordinate(graph, tuple(self[k] for k in graph.required.names))

    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        values = self._values
        if not self.hasFull():
            # Extract a complete values tuple from the attributes of the given
            # records.  It's possible for these to be inconsistent with
            # self._values (which is a serious problem, of course), but we've
            # documented this as a no-checking API.
            values += tuple(getattr(records[d.name], d.primaryKey.name) for d in self._graph.implied)
        return _ExpandedTupleDataCoordinate(self._graph, values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return len(self._values) == len(self._graph._dataCoordinateIndices)

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def _record(self, name: str) -> Optional[DimensionRecord]:
        # Docstring inherited from DataCoordinate.
        # This implementation never carries records (hasRecords() is always
        # False), so callers should never reach this method.  Raise explicitly
        # rather than using a bare ``assert False``, which would be stripped
        # under ``python -O`` and silently return None.
        raise AssertionError("_record should never be called: hasRecords() is always False here.")

714 

715 

class _ExpandedTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that can hold `DimensionRecord`
    objects.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.expanded`.

    Parameters
    ----------
    graph : `DimensionGraph`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match ``graph._dataCoordinateIndices``.
        May include values for just required dimensions (which always come
        first) or all dimensions.
    records : `Mapping` [ `str`, `DimensionRecord` or `None` ]
        A `NamedKeyMapping` with `DimensionElement` keys or a regular
        `Mapping` with `str` (`DimensionElement` name) keys and
        `DimensionRecord` values.  Keys must cover all elements in
        ``self.graph.elements``.  Values may be `None`, but only to reflect
        actual NULL values in the database, not just records that have not
        been fetched.
    """

    __slots__ = ("_records",)

    def __init__(self, graph: DimensionGraph, values: Tuple[DataIdValue, ...],
                 records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]):
        super().__init__(graph, values)
        assert super().hasFull(), "This implementation requires full dimension records."
        self._records = records

    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if graph == self._graph:
            return self
        # Records for the superset graph cover the subset as well, so the
        # result keeps hasRecords() == True.
        subsetValues = tuple(self[k] for k in graph._dataCoordinateIndices.keys())
        return _ExpandedTupleDataCoordinate(graph, subsetValues, records=self._records)

    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        # Already expanded; the given records are ignored.
        return self

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        # Guaranteed by the assertion in __init__.
        return True

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def _record(self, name: str) -> Optional[DimensionRecord]:
        # Docstring inherited from DataCoordinate.
        return self._records[name]