Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22# 

23# Design notes for this module are in 

24# doc/lsst.daf.butler/dev/dataCoordinate.py. 

25# 

26 

27from __future__ import annotations 

28 

29__all__ = ("DataCoordinate", "DataId") 

30 

31from abc import abstractmethod 

32import numbers 

33from typing import ( 

34 AbstractSet, 

35 Any, 

36 Dict, 

37 Iterator, 

38 Mapping, 

39 Optional, 

40 Tuple, 

41 TYPE_CHECKING, 

42 Union, 

43) 

44 

45import astropy.time 

46 

47from lsst.sphgeom import Region 

48from ..named import NamedKeyMapping, NameLookupMapping, NamedValueSet 

49from ..timespan import Timespan 

50from .elements import Dimension, DimensionElement 

51from .graph import DimensionGraph 

52from .records import DimensionRecord 

53 

if TYPE_CHECKING:  # Imports needed only for type annotations; may be circular.
    from .universe import DimensionUniverse

56 

DataIdKey = Union[str, Dimension]
"""Type annotation alias for the keys that can be used to index a
DataCoordinate.
"""

DataIdValue = Union[str, int, None]
"""Type annotation alias for the values that can be present in a
DataCoordinate or other data ID.
"""

66 

67 

68def _intersectRegions(*args: Region) -> Optional[Region]: 

69 """Return the intersection of several regions. 

70 

71 For internal use by `ExpandedDataCoordinate` only. 

72 

73 If no regions are provided, returns `None`. 

74 

75 This is currently a placeholder; it actually returns `NotImplemented` 

76 (it does *not* raise an exception) when multiple regions are given, which 

77 propagates to `ExpandedDataCoordinate`. This reflects the fact that we 

78 don't want to fail to construct an `ExpandedDataCoordinate` entirely when 

79 we can't compute its region, and at present we don't have a high-level use 

80 case for the regions of these particular data IDs. 

81 """ 

82 if len(args) == 0: 

83 return None 

84 elif len(args) == 1: 

85 return args[0] 

86 else: 

87 return NotImplemented 

88 

89 

class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
    """An immutable data ID dictionary that guarantees that its key-value pairs
    identify at least all required dimensions in a `DimensionGraph`.

    `DataCoordinate` itself is an ABC, but provides `staticmethod` factory
    functions for private concrete implementations that should be sufficient
    for most purposes. `standardize` is the most flexible and safe of these;
    the others (`makeEmpty`, `fromRequiredValues`, and `fromFullValues`) are
    more specialized and perform little or no checking of inputs.

    Notes
    -----
    Like any data ID class, `DataCoordinate` behaves like a dictionary, but
    with some subtleties:

    - Both `Dimension` instances and `str` names thereof may be used as keys
      in lookup operations, but iteration (and `keys`) will yield `Dimension`
      instances. The `names` property can be used to obtain the corresponding
      `str` names.

    - Lookups for implied dimensions (those in ``self.graph.implied``) are
      supported if and only if `hasFull` returns `True`, and are never
      included in iteration or `keys`. The `full` property may be used to
      obtain a mapping whose keys do include implied dimensions.

    - Equality comparison with other mappings is supported, but it always
      considers only required dimensions (as well as requiring both operands
      to identify the same dimensions). This is not quite consistent with the
      way mappings usually work - normally differing keys imply unequal
      mappings - but it makes sense in this context because data IDs with the
      same values for required dimensions but different values for implied
      dimensions represent a serious problem with the data that
      `DataCoordinate` cannot generally recognize on its own, and a data ID
      that knows implied dimension values should still be able to compare as
      equal to one that does not. This is of course not the way comparisons
      between simple `dict` data IDs work, and hence using a `DataCoordinate`
      instance for at least one operand in any data ID comparison is strongly
      recommended.
    """

    __slots__ = ()

    @staticmethod
    def standardize(
        mapping: Optional[NameLookupMapping[Dimension, DataIdValue]] = None,
        *,
        graph: Optional[DimensionGraph] = None,
        universe: Optional[DimensionUniverse] = None,
        **kwargs: Any
    ) -> DataCoordinate:
        """Adapt an arbitrary mapping and/or additional arguments into a true
        `DataCoordinate`, or augment an existing one.

        Parameters
        ----------
        mapping : `~collections.abc.Mapping`, optional
            An informal data ID that maps dimensions or dimension names to
            their primary key values (may also be a true `DataCoordinate`).
        graph : `DimensionGraph`
            The dimensions to be identified by the new `DataCoordinate`.
            If not provided, will be inferred from the keys of ``mapping``,
            and ``universe`` must be provided unless ``mapping`` is already a
            `DataCoordinate`.
        universe : `DimensionUniverse`
            All known dimensions and their relationships; used to expand
            and validate dependencies when ``graph`` is not provided.
        **kwargs
            Additional keyword arguments are treated like additional key-value
            pairs in ``mapping``.

        Returns
        -------
        coordinate : `DataCoordinate`
            A validated `DataCoordinate` instance.

        Raises
        ------
        TypeError
            Raised if the set of optional arguments provided is not supported.
        KeyError
            Raised if a key-value pair for a required dimension is missing.
        """
        d: Dict[str, DataIdValue] = {}
        if isinstance(mapping, DataCoordinate):
            if graph is None:
                if not kwargs:
                    # Already standardized to exactly what we want.
                    return mapping
            elif kwargs.keys().isdisjoint(graph.dimensions.names):
                # User provided kwargs, but told us not to use them by
                # passing in dimensions that are disjoint from those kwargs.
                # This is not necessarily user error - it's a useful pattern
                # to pass in all of the key-value pairs you have and let the
                # code here pull out only what it needs.
                return mapping.subset(graph)
            assert universe is None or universe == mapping.universe
            universe = mapping.universe
            d.update((name, mapping[name]) for name in mapping.graph.required.names)
            if mapping.hasFull():
                d.update((name, mapping[name]) for name in mapping.graph.implied.names)
        elif isinstance(mapping, NamedKeyMapping):
            d.update(mapping.byName())
        elif mapping is not None:
            d.update(mapping)
        d.update(kwargs)
        if graph is None:
            if universe is None:
                raise TypeError("universe must be provided if graph is not.")
            graph = DimensionGraph(universe, names=d.keys())
        if not graph.dimensions:
            return DataCoordinate.makeEmpty(graph.universe)
        if d.keys() >= graph.dimensions.names:
            values = tuple(d[name] for name in graph._dataCoordinateIndices.keys())
        else:
            try:
                values = tuple(d[name] for name in graph.required.names)
            except KeyError as err:
                raise KeyError(f"No value in data ID ({mapping}) for required dimension {err}.") from err
        # Some backends cannot handle numpy.int64 type which is a subclass of
        # numbers.Integral; convert that to int.
        values = tuple(int(val) if isinstance(val, numbers.Integral)  # type: ignore
                       else val for val in values)
        return _BasicTupleDataCoordinate(graph, values)

    @staticmethod
    def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate` that identifies the null set of
        dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies no dimensions. `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        return _ExpandedTupleDataCoordinate(universe.empty, (), {})

    @staticmethod
    def fromRequiredValues(graph: DimensionGraph, values: Tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from a tuple of dimension values that
        identify only required dimensions.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to ``graph.required``,
            in that order.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` if and only if
            ``graph.implied`` is empty, and ``dataId.hasRecords()`` will never
            return `True`.
        """
        assert len(graph.required) == len(values), \
            f"Inconsistency between dimensions {graph.required} and required values {values}."
        return _BasicTupleDataCoordinate(graph, values)

    @staticmethod
    def fromFullValues(graph: DimensionGraph, values: Tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from a tuple of dimension values that
        identify all dimensions.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(graph.required, graph.implied)``, in that order.
            Note that this is _not_ the same order as ``graph.dimensions``,
            though these contain the same elements.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`, because values
            for all dimensions (required and implied) are provided, while
            ``dataId.hasRecords()`` will never return `True`.
        """
        assert len(graph.dimensions) == len(values), \
            f"Inconsistency between dimensions {graph.dimensions} and full values {values}."
        return _BasicTupleDataCoordinate(graph, values)

    def __hash__(self) -> int:
        # Hash on the graph plus the required values only, consistent with
        # __eq__ below (implied values never participate).
        return hash((self.graph,) + tuple(self[d.name] for d in self.graph.required))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, DataCoordinate):
            other = DataCoordinate.standardize(other, universe=self.universe)
        return self.graph == other.graph and all(self[d.name] == other[d.name] for d in self.graph.required)

    def __repr__(self) -> str:
        # We can't make repr yield something that could be exec'd here without
        # printing out the whole DimensionUniverse the graph is derived from.
        # So we print something that mostly looks like a dict, but doesn't
        # quote its keys: that's both more compact and something that can't
        # be mistaken for an actual dict or something that could be exec'd.
        return "{{{}}}".format(
            ', '.join(f"{d}: {self.get(d, '?')}" for d in self.graph.dimensions.names)
        )

    def __iter__(self) -> Iterator[Dimension]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueSet[Dimension]:
        # Only required dimensions appear in iteration; implied ones are
        # reachable via `full` when `hasFull()` is `True`.
        return self.graph.required

    @property
    def names(self) -> AbstractSet[str]:
        """The names of the required dimensions identified by this data ID, in
        the same order as `keys` (`collections.abc.Set` [ `str` ]).
        """
        return self.keys().names

    @abstractmethod
    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        """Return a `DataCoordinate` whose graph is a subset of ``self.graph``.

        Parameters
        ----------
        graph : `DimensionGraph`
            The dimensions identified by the returned `DataCoordinate`.

        Returns
        -------
        coordinate : `DataCoordinate`
            A `DataCoordinate` instance that identifies only the given
            dimensions. May be ``self`` if ``graph == self.graph``.

        Raises
        ------
        KeyError
            Raised if the primary key value for one or more required dimensions
            is unknown. This may happen if ``graph.issubset(self.graph)`` is
            `False`, or even if ``graph.issubset(self.graph)`` is `True`, if
            ``self.hasFull()`` is `False` and
            ``graph.required.issubset(self.graph.required)`` is `False`. As
            an example of the latter case, consider trying to go from a data ID
            with dimensions {instrument, physical_filter, abstract_filter} to
            just {instrument, abstract_filter}; abstract_filter is implied by
            physical_filter and hence would have no value in the original data
            ID if ``self.hasFull()`` is `False`.

        Notes
        -----
        If `hasFull` and `hasRecords` return `True` on ``self``, they will
        return `True` (respectively) on the returned `DataCoordinate` as well.
        The converse does not hold.
        """
        raise NotImplementedError()

    @abstractmethod
    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        """Return a `DataCoordinate` that holds the given records and
        guarantees that `hasRecords` returns `True`.

        This is a low-level interface with at most assertion-level checking of
        inputs. Most callers should use `Registry.expandDataId` instead.

        Parameters
        ----------
        records : `Mapping` [ `str`, `DimensionRecord` or `None` ]
            A `NamedKeyMapping` with `DimensionElement` keys or a regular
            `Mapping` with `str` (`DimensionElement` name) keys and
            `DimensionRecord` values. Keys must cover all elements in
            ``self.graph.elements``. Values may be `None`, but only to reflect
            actual NULL values in the database, not just records that have not
            been fetched.
        """
        raise NotImplementedError()

    @property
    def universe(self) -> DimensionUniverse:
        """The universe that defines all known dimensions compatible with
        this coordinate (`DimensionUniverse`).
        """
        return self.graph.universe

    @property
    @abstractmethod
    def graph(self) -> DimensionGraph:
        """The dimensions identified by this data ID (`DimensionGraph`).

        Note that values are only required to be present for dimensions in
        ``self.graph.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        raise NotImplementedError()

    @abstractmethod
    def hasFull(self) -> bool:
        """Whether this data ID contains values for implied as well as
        required dimensions.

        Returns
        -------
        state : `bool`
            If `True`, `__getitem__`, `get`, and `__contains__` (but not
            `keys`!) will act as though the mapping includes key-value pairs
            for implied dimensions, and the `full` property may be used. If
            `False`, these operations only include key-value pairs for required
            dimensions, and accessing `full` is an error. Always `True` if
            there are no implied dimensions.
        """
        raise NotImplementedError()

    @property
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        """A mapping that includes key-value pairs for all dimensions in
        ``self.graph``, including implied (`NamedKeyMapping`).

        Accessing this attribute if `hasFull` returns `False` is a logic error
        that may raise an exception of unspecified type either immediately or
        when implied keys are accessed via the returned mapping, depending on
        the implementation and whether assertions are enabled.
        """
        # The message now matches the condition actually checked (it
        # previously claimed hasRecords() was required).
        assert self.hasFull(), "full may only be accessed if hasFull() returns True."
        return _DataCoordinateFullView(self)

    @abstractmethod
    def hasRecords(self) -> bool:
        """Whether this data ID contains records for all of the dimension
        elements it identifies.

        Returns
        -------
        state : `bool`
            If `True`, the following attributes may be accessed:

            - `records`
            - `region`
            - `timespan`
            - `pack`

            If `False`, accessing any of these is considered a logic error.
        """
        raise NotImplementedError()

    @property
    def records(self) -> NamedKeyMapping[DimensionElement, Optional[DimensionRecord]]:
        """A mapping that contains `DimensionRecord` objects for all elements
        identified by this data ID (`NamedKeyMapping`).

        The values of this mapping may be `None` if and only if there is no
        record for that element with these dimensions in the database (which
        means some foreign key field must have a NULL value).

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may raise an exception of unspecified type either
        immediately or when the returned mapping is used, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "records may only be accessed if hasRecords() returns True."
        return _DataCoordinateRecordsView(self)

    @abstractmethod
    def _record(self, name: str) -> Optional[DimensionRecord]:
        """Protected implementation hook that backs the ``records`` attribute.

        Parameters
        ----------
        name : `str`
            The name of a `DimensionElement`, guaranteed to be in
            ``self.graph.elements.names``.

        Returns
        -------
        record : `DimensionRecord` or `None`
            The dimension record for the given element identified by this
            data ID, or `None` if there is no such record.
        """
        raise NotImplementedError()

    @property
    def region(self) -> Optional[Region]:
        """The spatial region associated with this data ID
        (`lsst.sphgeom.Region` or `None`).

        This is `None` if and only if ``self.graph.spatial`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "region may only be accessed if hasRecords() returns True."
        regions = []
        for element in self.graph.spatial:
            record = self._record(element.name)
            # DimensionRecord subclasses for spatial elements always have a
            # .region, but they're dynamic so this can't be type-checked.
            if record is None or record.region is None:  # type: ignore
                return None
            else:
                regions.append(record.region)  # type:ignore
        return _intersectRegions(*regions)

    @property
    def timespan(self) -> Optional[Timespan[astropy.time.Time]]:
        """The temporal interval associated with this data ID
        (`Timespan` or `None`).

        This is `None` if and only if ``self.graph.timespan`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "timespan may only be accessed if hasRecords() returns True."
        timespans = []
        for element in self.graph.temporal:
            record = self._record(element.name)
            # DimensionRecord subclasses for temporal elements always have
            # .timespan, but they're dynamic so this can't be type-checked.
            if record is None or record.timespan is None:  # type:ignore
                return None
            else:
                timespans.append(record.timespan)  # type:ignore
        return Timespan.intersection(*timespans)

    def pack(self, name: str, *, returnMaxBits: bool = False) -> Union[Tuple[int, int], int]:
        """Pack this data ID into an integer.

        Parameters
        ----------
        name : `str`
            Name of the `DimensionPacker` algorithm (as defined in the
            dimension configuration).
        returnMaxBits : `bool`, optional
            If `True` (`False` is default), return the maximum number of
            nonzero bits in the returned integer across all data IDs.

        Returns
        -------
        packed : `int`
            Integer ID. This ID is unique only across data IDs that have
            the same values for the packer's "fixed" dimensions.
        maxBits : `int`, optional
            Maximum number of nonzero bits in ``packed``. Not returned unless
            ``returnMaxBits`` is `True`.

        Notes
        -----
        Calling this method if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "pack() may only be called if hasRecords() returns True."
        return self.universe.makePacker(name, self).pack(self, returnMaxBits=returnMaxBits)

560 

561 

DataId = Union[DataCoordinate, Mapping[str, Any]]
"""A type-annotation alias for signatures that accept both informal data ID
dictionaries and validated `DataCoordinate` instances.
"""

566 

567 

class _DataCoordinateFullView(NamedKeyMapping[Dimension, DataIdValue]):
    """View class that provides the default implementation for
    `DataCoordinate.full`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Delegate directly; the target supports both str and Dimension keys.
        return self._target[key]

    def __iter__(self) -> Iterator[Dimension]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueSet[Dimension]:
        # Unlike the target's own `keys`, this view exposes *all* dimensions,
        # implied as well as required.
        return self._target.graph.dimensions

    @property
    def names(self) -> AbstractSet[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names

599 

class _DataCoordinateRecordsView(NamedKeyMapping[DimensionElement, Optional[DimensionRecord]]):
    """View class that provides the default implementation for
    `DataCoordinate.records`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: Union[DimensionElement, str]) -> Optional[DimensionRecord]:
        # Normalize DimensionElement keys to their names before delegating to
        # the target's protected record-lookup hook.
        name = key.name if isinstance(key, DimensionElement) else key
        return self._target._record(name)

    def __iter__(self) -> Iterator[DimensionElement]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueSet[DimensionElement]:
        # Records exist for all elements (not just dimensions) in the graph.
        return self._target.graph.elements

    @property
    def names(self) -> AbstractSet[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names

632 

633 

class _BasicTupleDataCoordinate(DataCoordinate):
    """Standard implementation of `DataCoordinate`, backed by a tuple of
    values.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via the static
    methods there.

    Parameters
    ----------
    graph : `DimensionGraph`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match ``graph._dataCoordinateIndices``. May
        include values for just required dimensions (which always come first)
        or all dimensions.
    """
    def __init__(self, graph: DimensionGraph, values: Tuple[DataIdValue, ...]):
        self._graph = graph
        self._values = values

    __slots__ = ("_graph", "_values")

    @property
    def graph(self) -> DimensionGraph:
        # Docstring inherited from DataCoordinate.
        return self._graph

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Docstring inherited from DataCoordinate.
        if isinstance(key, Dimension):
            key = key.name
        index = self._graph._dataCoordinateIndices[key]
        try:
            return self._values[index]
        except IndexError:
            # Caller asked for an implied dimension, but this object only has
            # values for the required ones.
            raise KeyError(key)

    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if self._graph == graph:
            return self
        elif self.hasFull() or self._graph.required.issuperset(graph.dimensions):
            # We can provide values for all of the subset graph's dimensions
            # (required and implied), so keep the data ID "full".
            return _BasicTupleDataCoordinate(
                graph,
                tuple(self[k] for k in graph._dataCoordinateIndices.keys()),
            )
        else:
            # Fall back to required-only values; implied values are unknown.
            return _BasicTupleDataCoordinate(graph, tuple(self[k] for k in graph.required.names))

    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        values = self._values
        if not self.hasFull():
            # Extract a complete values tuple from the attributes of the given
            # records. It's possible for these to be inconsistent with
            # self._values (which is a serious problem, of course), but we've
            # documented this as a no-checking API.
            values += tuple(getattr(records[d.name], d.primaryKey.name) for d in self._graph.implied)
        return _ExpandedTupleDataCoordinate(self._graph, values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return len(self._values) == len(self._graph._dataCoordinateIndices)

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def _record(self, name: str) -> Optional[DimensionRecord]:
        # Docstring inherited from DataCoordinate.
        # This implementation never carries records (hasRecords() is always
        # False), so reaching this method is a logic error in the caller.
        # Raise unconditionally rather than `assert False`, which would be
        # stripped under `python -O` and silently return None.
        raise AssertionError("_record should never be called on a data ID without records.")

709 

710 

class _ExpandedTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that can hold `DimensionRecord`
    objects.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.expanded`.

    Parameters
    ----------
    graph : `DimensionGraph`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match ``graph._dataCoordinateIndices``.
        May include values for just required dimensions (which always come
        first) or all dimensions.
    records : `Mapping` [ `str`, `DimensionRecord` or `None` ]
        A `NamedKeyMapping` with `DimensionElement` keys or a regular
        `Mapping` with `str` (`DimensionElement` name) keys and
        `DimensionRecord` values. Keys must cover all elements in
        ``self.graph.elements``. Values may be `None`, but only to reflect
        actual NULL values in the database, not just records that have not
        been fetched.
    """

    __slots__ = ("_records",)

    def __init__(self, graph: DimensionGraph, values: Tuple[DataIdValue, ...],
                 records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]):
        super().__init__(graph, values)
        # An expanded data ID must carry values for implied dimensions, too.
        assert super().hasFull(), "This implementation requires full dimension records."
        self._records = records

    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if graph == self._graph:
            return self
        subsetValues = tuple(self[name] for name in graph._dataCoordinateIndices.keys())
        # Pass the existing records along; they cover a superset of the
        # elements identified by the subset graph.
        return _ExpandedTupleDataCoordinate(graph, subsetValues, records=self._records)

    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        # Already expanded; ignore the given records and return self.
        return self

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def _record(self, name: str) -> Optional[DimensionRecord]:
        # Docstring inherited from DataCoordinate.
        return self._records[name]