Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22# 

23# Design notes for this module are in 

24# doc/lsst.daf.butler/dev/dataCoordinate.py. 

25# 

26 

27from __future__ import annotations 

28 

29__all__ = ("DataCoordinate", "DataId") 

30 

31from abc import abstractmethod 

32import numbers 

33from typing import ( 

34 AbstractSet, 

35 Any, 

36 Dict, 

37 Iterator, 

38 Mapping, 

39 Optional, 

40 Tuple, 

41 TYPE_CHECKING, 

42 Union, 

43) 

44 

45from lsst.sphgeom import Region 

46from ..named import NamedKeyMapping, NameLookupMapping, NamedValueSet 

47from ..timespan import Timespan 

48from .elements import Dimension, DimensionElement 

49from .graph import DimensionGraph 

50from .records import DimensionRecord 

51 

if TYPE_CHECKING:  # Imports needed only for type annotations; may be circular.
    from .universe import DimensionUniverse

54 

DataIdKey = Union[str, Dimension]
"""Type annotation alias for the keys that can be used to index a
DataCoordinate.
"""

DataIdValue = Union[str, int, None]
"""Type annotation alias for the values that can be present in a
DataCoordinate or other data ID.
"""

64 

65 

66def _intersectRegions(*args: Region) -> Optional[Region]: 

67 """Return the intersection of several regions. 

68 

69 For internal use by `ExpandedDataCoordinate` only. 

70 

71 If no regions are provided, returns `None`. 

72 

73 This is currently a placeholder; it actually returns `NotImplemented` 

74 (it does *not* raise an exception) when multiple regions are given, which 

75 propagates to `ExpandedDataCoordinate`. This reflects the fact that we 

76 don't want to fail to construct an `ExpandedDataCoordinate` entirely when 

77 we can't compute its region, and at present we don't have a high-level use 

78 case for the regions of these particular data IDs. 

79 """ 

80 if len(args) == 0: 

81 return None 

82 elif len(args) == 1: 

83 return args[0] 

84 else: 

85 return NotImplemented 

86 

87 

class DataCoordinate(NamedKeyMapping[Dimension, DataIdValue]):
    """An immutable data ID dictionary that guarantees that its key-value pairs
    identify at least all required dimensions in a `DimensionGraph`.

    `DataCoordinate` itself is an ABC, but provides `staticmethod` factory
    functions for private concrete implementations that should be sufficient
    for most purposes.  `standardize` is the most flexible and safe of these;
    the others (`makeEmpty`, `fromRequiredValues`, and `fromFullValues`) are
    more specialized and perform little or no checking of inputs.

    Notes
    -----
    Like any data ID class, `DataCoordinate` behaves like a dictionary, but
    with some subtleties:

    - Both `Dimension` instances and `str` names thereof may be used as keys
      in lookup operations, but iteration (and `keys`) will yield `Dimension`
      instances.  The `names` property can be used to obtain the corresponding
      `str` names.

    - Lookups for implied dimensions (those in ``self.graph.implied``) are
      supported if and only if `hasFull` returns `True`, and are never
      included in iteration or `keys`.  The `full` property may be used to
      obtain a mapping whose keys do include implied dimensions.

    - Equality comparison with other mappings is supported, but it always
      considers only required dimensions (as well as requiring both operands
      to identify the same dimensions).  This is not quite consistent with the
      way mappings usually work - normally differing keys imply unequal
      mappings - but it makes sense in this context because data IDs with the
      same values for required dimensions but different values for implied
      dimensions represent a serious problem with the data that
      `DataCoordinate` cannot generally recognize on its own, and a data ID
      that knows implied dimension values should still be able to compare as
      equal to one that does not.  This is of course not the way comparisons
      between simple `dict` data IDs work, and hence using a `DataCoordinate`
      instance for at least one operand in any data ID comparison is strongly
      recommended.
    """

    __slots__ = ()

    @staticmethod
    def standardize(
        mapping: Optional[NameLookupMapping[Dimension, DataIdValue]] = None,
        *,
        graph: Optional[DimensionGraph] = None,
        universe: Optional[DimensionUniverse] = None,
        **kwargs: Any
    ) -> DataCoordinate:
        """Adapt an arbitrary mapping and/or additional arguments into a true
        `DataCoordinate`, or augment an existing one.

        Parameters
        ----------
        mapping : `~collections.abc.Mapping`, optional
            An informal data ID that maps dimensions or dimension names to
            their primary key values (may also be a true `DataCoordinate`).
        graph : `DimensionGraph`
            The dimensions to be identified by the new `DataCoordinate`.
            If not provided, will be inferred from the keys of ``mapping``,
            and ``universe`` must be provided unless ``mapping`` is already a
            `DataCoordinate`.
        universe : `DimensionUniverse`
            All known dimensions and their relationships; used to expand
            and validate dependencies when ``graph`` is not provided.
        **kwargs
            Additional keyword arguments are treated like additional key-value
            pairs in ``mapping``.

        Returns
        -------
        coordinate : `DataCoordinate`
            A validated `DataCoordinate` instance.

        Raises
        ------
        TypeError
            Raised if the set of optional arguments provided is not supported.
        KeyError
            Raised if a key-value pair for a required dimension is missing.
        """
        d: Dict[str, DataIdValue] = {}
        if isinstance(mapping, DataCoordinate):
            if graph is None:
                if not kwargs:
                    # Already standardized to exactly what we want.
                    return mapping
            elif kwargs.keys().isdisjoint(graph.dimensions.names):
                # User provided kwargs, but told us not to use them by
                # passing in dimensions that are disjoint from those kwargs.
                # This is not necessarily user error - it's a useful pattern
                # to pass in all of the key-value pairs you have and let the
                # code here pull out only what it needs.
                return mapping.subset(graph)
            assert universe is None or universe == mapping.universe
            universe = mapping.universe
            d.update((name, mapping[name]) for name in mapping.graph.required.names)
            if mapping.hasFull():
                d.update((name, mapping[name]) for name in mapping.graph.implied.names)
        elif isinstance(mapping, NamedKeyMapping):
            d.update(mapping.byName())
        elif mapping is not None:
            d.update(mapping)
        d.update(kwargs)
        if graph is None:
            if universe is None:
                raise TypeError("universe must be provided if graph is not.")
            graph = DimensionGraph(universe, names=d.keys())
        if not graph.dimensions:
            return DataCoordinate.makeEmpty(graph.universe)
        if d.keys() >= graph.dimensions.names:
            # We have values for all dimensions (required and implied); order
            # them to match the graph's canonical data-coordinate ordering.
            values = tuple(d[name] for name in graph._dataCoordinateIndices.keys())
        else:
            try:
                values = tuple(d[name] for name in graph.required.names)
            except KeyError as err:
                raise KeyError(f"No value in data ID ({mapping}) for required dimension {err}.") from err
        # Some backends cannot handle numpy.int64 type which is a subclass of
        # numbers.Integral; convert that to int.
        values = tuple(int(val) if isinstance(val, numbers.Integral)  # type: ignore
                       else val for val in values)
        return _BasicTupleDataCoordinate(graph, values)

    @staticmethod
    def makeEmpty(universe: DimensionUniverse) -> DataCoordinate:
        """Return an empty `DataCoordinate` that identifies the null set of
        dimensions.

        Parameters
        ----------
        universe : `DimensionUniverse`
            Universe to which this null dimension set belongs.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies no dimensions.  `hasFull` and
            `hasRecords` are guaranteed to return `True`, because both `full`
            and `records` are just empty mappings.
        """
        return _ExpandedTupleDataCoordinate(universe.empty, (), {})

    @staticmethod
    def fromRequiredValues(graph: DimensionGraph, values: Tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from a tuple of dimension values that
        identify only required dimensions.

        This is a low-level interface with at most assertion-level checking of
        inputs.  Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to ``graph.required``,
            in that order.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will return `True` if and only if
            ``graph.implied`` is empty, and ``dataId.hasRecords()`` will never
            return `True`.
        """
        assert len(graph.required) == len(values), \
            f"Inconsistency between dimensions {graph.required} and required values {values}."
        return _BasicTupleDataCoordinate(graph, values)

    @staticmethod
    def fromFullValues(graph: DimensionGraph, values: Tuple[DataIdValue, ...]) -> DataCoordinate:
        """Construct a `DataCoordinate` from a tuple of dimension values that
        identify all dimensions.

        This is a low-level interface with at most assertion-level checking of
        inputs.  Most callers should use `standardize` instead.

        Parameters
        ----------
        graph : `DimensionGraph`
            Dimensions this data ID will identify.
        values : `tuple` [ `int` or `str` ]
            Tuple of primary key values corresponding to
            ``itertools.chain(graph.required, graph.implied)``, in that order.
            Note that this is _not_ the same order as ``graph.dimensions``,
            though these contain the same elements.

        Returns
        -------
        dataId : `DataCoordinate`
            A data ID object that identifies the given dimensions.
            ``dataId.hasFull()`` will always return `True`, because values
            for all dimensions (required and implied) were provided, and
            ``dataId.hasRecords()`` will never return `True`.
        """
        assert len(graph.dimensions) == len(values), \
            f"Inconsistency between dimensions {graph.dimensions} and full values {values}."
        return _BasicTupleDataCoordinate(graph, values)

    def __hash__(self) -> int:
        # Hash on required-dimension values only, consistent with __eq__.
        return hash((self.graph,) + tuple(self[d.name] for d in self.graph.required))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, DataCoordinate):
            other = DataCoordinate.standardize(other, universe=self.universe)
        return self.graph == other.graph and all(self[d.name] == other[d.name] for d in self.graph.required)

    def __repr__(self) -> str:
        # We can't make repr yield something that could be exec'd here without
        # printing out the whole DimensionUniverse the graph is derived from.
        # So we print something that mostly looks like a dict, but doesn't
        # quote its keys: that's both more compact and something that can't
        # be mistaken for an actual dict or something that could be exec'd.
        return "{{{}}}".format(
            ', '.join(f"{d}: {self.get(d, '?')}" for d in self.graph.dimensions.names)
        )

    def __iter__(self) -> Iterator[Dimension]:
        return iter(self.keys())

    def __len__(self) -> int:
        return len(self.keys())

    def keys(self) -> NamedValueSet[Dimension]:
        # Only required dimensions appear in iteration and keys().
        return self.graph.required

    @property
    def names(self) -> AbstractSet[str]:
        """The names of the required dimensions identified by this data ID, in
        the same order as `keys` (`collections.abc.Set` [ `str` ]).
        """
        return self.keys().names

    @abstractmethod
    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        """Return a `DataCoordinate` whose graph is a subset of ``self.graph``.

        Parameters
        ----------
        graph : `DimensionGraph`
            The dimensions identified by the returned `DataCoordinate`.

        Returns
        -------
        coordinate : `DataCoordinate`
            A `DataCoordinate` instance that identifies only the given
            dimensions.  May be ``self`` if ``graph == self.graph``.

        Raises
        ------
        KeyError
            Raised if the primary key value for one or more required dimensions
            is unknown.  This may happen if ``graph.issubset(self.graph)`` is
            `False`, or even if ``graph.issubset(self.graph)`` is `True`, if
            ``self.hasFull()`` is `False` and
            ``graph.required.issubset(self.graph.required)`` is `False`.  As
            an example of the latter case, consider trying to go from a data ID
            with dimensions {instrument, physical_filter, band} to
            just {instrument, band}; band is implied by
            physical_filter and hence would have no value in the original data
            ID if ``self.hasFull()`` is `False`.

        Notes
        -----
        If `hasFull` and `hasRecords` return `True` on ``self``, they will
        return `True` (respectively) on the returned `DataCoordinate` as well.
        The converse does not hold.
        """
        raise NotImplementedError()

    @abstractmethod
    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        """Return a `DataCoordinate` that holds the given records and
        guarantees that `hasRecords` returns `True`.

        This is a low-level interface with at most assertion-level checking of
        inputs.  Most callers should use `Registry.expandDataId` instead.

        Parameters
        ----------
        records : `Mapping` [ `str`, `DimensionRecord` or `None` ]
            A `NamedKeyMapping` with `DimensionElement` keys or a regular
            `Mapping` with `str` (`DimensionElement` name) keys and
            `DimensionRecord` values.  Keys must cover all elements in
            ``self.graph.elements``.  Values may be `None`, but only to reflect
            actual NULL values in the database, not just records that have not
            been fetched.
        """
        raise NotImplementedError()

    @property
    def universe(self) -> DimensionUniverse:
        """The universe that defines all known dimensions compatible with
        this coordinate (`DimensionUniverse`).
        """
        return self.graph.universe

    @property
    @abstractmethod
    def graph(self) -> DimensionGraph:
        """The dimensions identified by this data ID (`DimensionGraph`).

        Note that values are only required to be present for dimensions in
        ``self.graph.required``; all others may be retrieved (from a
        `Registry`) given these.
        """
        raise NotImplementedError()

    @abstractmethod
    def hasFull(self) -> bool:
        """Whether this data ID contains values for implied as well as
        required dimensions.

        Returns
        -------
        state : `bool`
            If `True`, `__getitem__`, `get`, and `__contains__` (but not
            `keys`!) will act as though the mapping includes key-value pairs
            for implied dimensions, and the `full` property may be used.  If
            `False`, these operations only include key-value pairs for required
            dimensions, and accessing `full` is an error.  Always `True` if
            there are no implied dimensions.
        """
        raise NotImplementedError()

    @property
    def full(self) -> NamedKeyMapping[Dimension, DataIdValue]:
        """A mapping that includes key-value pairs for all dimensions in
        ``self.graph``, including implied (`NamedKeyMapping`).

        Accessing this attribute if `hasFull` returns `False` is a logic error
        that may raise an exception of unspecified type either immediately or
        when implied keys are accessed via the returned mapping, depending on
        the implementation and whether assertions are enabled.
        """
        # Message previously said "hasRecords()", but the condition being
        # checked here is hasFull().
        assert self.hasFull(), "full may only be accessed if hasFull() returns True."
        return _DataCoordinateFullView(self)

    @abstractmethod
    def hasRecords(self) -> bool:
        """Whether this data ID contains records for all of the dimension
        elements it identifies.

        Returns
        -------
        state : `bool`
            If `True`, the following attributes may be accessed:

            - `records`
            - `region`
            - `timespan`
            - `pack`

            If `False`, accessing any of these is considered a logic error.
        """
        raise NotImplementedError()

    @property
    def records(self) -> NamedKeyMapping[DimensionElement, Optional[DimensionRecord]]:
        """A mapping that contains `DimensionRecord` objects for all elements
        identified by this data ID (`NamedKeyMapping`).

        The values of this mapping may be `None` if and only if there is no
        record for that element with these dimensions in the database (which
        means some foreign key field must have a NULL value).

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may raise an exception of unspecified type either
        immediately or when the returned mapping is used, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "records may only be accessed if hasRecords() returns True."
        return _DataCoordinateRecordsView(self)

    @abstractmethod
    def _record(self, name: str) -> Optional[DimensionRecord]:
        """Protected implementation hook that backs the ``records`` attribute.

        Parameters
        ----------
        name : `str`
            The name of a `DimensionElement`, guaranteed to be in
            ``self.graph.elements.names``.

        Returns
        -------
        record : `DimensionRecord` or `None`
            The dimension record for the given element identified by this
            data ID, or `None` if there is no such record.
        """
        raise NotImplementedError()

    @property
    def region(self) -> Optional[Region]:
        """The spatial region associated with this data ID
        (`lsst.sphgeom.Region` or `None`).

        This is `None` if and only if ``self.graph.spatial`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "region may only be accessed if hasRecords() returns True."
        regions = []
        for element in self.graph.spatial:
            record = self._record(element.name)
            # DimensionRecord subclasses for spatial elements always have a
            # .region, but they're dynamic so this can't be type-checked.
            if record is None or record.region is None:  # type: ignore
                return None
            else:
                regions.append(record.region)  # type:ignore
        return _intersectRegions(*regions)

    @property
    def timespan(self) -> Optional[Timespan]:
        """The temporal interval associated with this data ID
        (`Timespan` or `None`).

        This is `None` if and only if ``self.graph.timespan`` is empty.

        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "timespan may only be accessed if hasRecords() returns True."
        timespans = []
        for element in self.graph.temporal:
            record = self._record(element.name)
            # DimensionRecord subclasses for temporal elements always have
            # .timespan, but they're dynamic so this can't be type-checked.
            if record is None or record.timespan is None:
                return None
            else:
                timespans.append(record.timespan)
        return Timespan.intersection(*timespans)

    def pack(self, name: str, *, returnMaxBits: bool = False) -> Union[Tuple[int, int], int]:
        """Pack this data ID into an integer.

        Parameters
        ----------
        name : `str`
            Name of the `DimensionPacker` algorithm (as defined in the
            dimension configuration).
        returnMaxBits : `bool`, optional
            If `True` (`False` is default), return the maximum number of
            nonzero bits in the returned integer across all data IDs.

        Returns
        -------
        packed : `int`
            Integer ID.  This ID is unique only across data IDs that have
            the same values for the packer's "fixed" dimensions.
        maxBits : `int`, optional
            Maximum number of nonzero bits in ``packed``.  Not returned unless
            ``returnMaxBits`` is `True`.

        Notes
        -----
        Accessing this attribute if `hasRecords` returns `False` is a logic
        error that may or may not raise an exception, depending on the
        implementation and whether assertions are enabled.
        """
        assert self.hasRecords(), "pack() may only be called if hasRecords() returns True."
        return self.universe.makePacker(name, self).pack(self, returnMaxBits=returnMaxBits)

558 

559 

DataId = Union[DataCoordinate, Mapping[str, Any]]
"""A type-annotation alias for signatures that accept both informal data ID
dictionaries and validated `DataCoordinate` instances.
"""

564 

565 

class _DataCoordinateFullView(NamedKeyMapping[Dimension, DataIdValue]):
    """View class that provides the default implementation for
    `DataCoordinate.full`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Delegate straight to the target, which supports implied-dimension
        # lookups whenever this view is legitimately constructed.
        return self._target[key]

    def __iter__(self) -> Iterator[Dimension]:
        yield from self.keys()

    def __len__(self) -> int:
        return len(self._target.graph.dimensions)

    def keys(self) -> NamedValueSet[Dimension]:
        # Unlike the target's own keys(), this includes implied dimensions.
        return self._target.graph.dimensions

    @property
    def names(self) -> AbstractSet[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names

596 

597 

class _DataCoordinateRecordsView(NamedKeyMapping[DimensionElement, Optional[DimensionRecord]]):
    """View class that provides the default implementation for
    `DataCoordinate.records`.

    Parameters
    ----------
    target : `DataCoordinate`
        The `DataCoordinate` instance this object provides a view of.
    """

    __slots__ = ("_target",)

    def __init__(self, target: DataCoordinate):
        self._target = target

    def __getitem__(self, key: Union[DimensionElement, str]) -> Optional[DimensionRecord]:
        # Normalize element objects to their names before delegating to the
        # target's protected lookup hook.
        name = key.name if isinstance(key, DimensionElement) else key
        return self._target._record(name)

    def __iter__(self) -> Iterator[DimensionElement]:
        yield from self.keys()

    def __len__(self) -> int:
        return len(self._target.graph.elements)

    def keys(self) -> NamedValueSet[DimensionElement]:
        # All elements identified by the target data ID, not just dimensions.
        return self._target.graph.elements

    @property
    def names(self) -> AbstractSet[str]:
        # Docstring inherited from `NamedKeyMapping`.
        return self.keys().names

630 

631 

class _BasicTupleDataCoordinate(DataCoordinate):
    """Standard implementation of `DataCoordinate`, backed by a tuple of
    values.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via the static
    methods there.

    Parameters
    ----------
    graph : `DimensionGraph`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match ``graph._dataCoordinateIndices``.  May
        include values for just required dimensions (which always come first)
        or all dimensions.
    """

    __slots__ = ("_graph", "_values")

    def __init__(self, graph: DimensionGraph, values: Tuple[DataIdValue, ...]):
        self._graph = graph
        self._values = values

    @property
    def graph(self) -> DimensionGraph:
        # Docstring inherited from DataCoordinate.
        return self._graph

    def __getitem__(self, key: DataIdKey) -> DataIdValue:
        # Docstring inherited from DataCoordinate.
        name = key.name if isinstance(key, Dimension) else key
        position = self._graph._dataCoordinateIndices[name]
        try:
            return self._values[position]
        except IndexError:
            # Caller asked for an implied dimension, but this object only has
            # values for the required ones.
            raise KeyError(name)

    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if graph == self._graph:
            return self
        if self.hasFull() or self._graph.required.issuperset(graph.dimensions):
            # We can supply values for all of the subset's dimensions.
            newValues = tuple(self[name] for name in graph._dataCoordinateIndices.keys())
        else:
            # Fall back to required dimensions only; __getitem__ raises
            # KeyError for anything we genuinely don't have.
            newValues = tuple(self[name] for name in graph.required.names)
        return _BasicTupleDataCoordinate(graph, newValues)

    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate
        values = self._values
        if not self.hasFull():
            # Extract a complete values tuple from the attributes of the given
            # records.  It's possible for these to be inconsistent with
            # self._values (which is a serious problem, of course), but we've
            # documented this as a no-checking API.
            implied = tuple(getattr(records[d.name], d.primaryKey.name) for d in self._graph.implied)
            values = values + implied
        return _ExpandedTupleDataCoordinate(self._graph, values, records)

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        # Full means one stored value per entry in the coordinate index map.
        return len(self._graph._dataCoordinateIndices) == len(self._values)

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return False

    def _record(self, name: str) -> Optional[DimensionRecord]:
        # Docstring inherited from DataCoordinate.
        # Unreachable when callers respect hasRecords() == False.
        assert False

707 

708 

class _ExpandedTupleDataCoordinate(_BasicTupleDataCoordinate):
    """A `DataCoordinate` implementation that can hold `DimensionRecord`
    objects.

    This class should only be accessed outside this module via the
    `DataCoordinate` interface, and should only be constructed via calls to
    `DataCoordinate.expanded`.

    Parameters
    ----------
    graph : `DimensionGraph`
        The dimensions to be identified.
    values : `tuple` [ `int` or `str` ]
        Data ID values, ordered to match ``graph._dataCoordinateIndices``.
        May include values for just required dimensions (which always come
        first) or all dimensions.
    records : `Mapping` [ `str`, `DimensionRecord` or `None` ]
        A `NamedKeyMapping` with `DimensionElement` keys or a regular
        `Mapping` with `str` (`DimensionElement` name) keys and
        `DimensionRecord` values.  Keys must cover all elements in
        ``self.graph.elements``.  Values may be `None`, but only to reflect
        actual NULL values in the database, not just records that have not
        been fetched.
    """

    __slots__ = ("_records",)

    def __init__(self, graph: DimensionGraph, values: Tuple[DataIdValue, ...],
                 records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]):
        super().__init__(graph, values)
        assert super().hasFull(), "This implementation requires full dimension records."
        self._records = records

    def subset(self, graph: DimensionGraph) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        if graph == self._graph:
            return self
        # Records for a superset of elements remain valid for the subset.
        subsetValues = tuple(self[k] for k in graph._dataCoordinateIndices.keys())
        return _ExpandedTupleDataCoordinate(graph, subsetValues, records=self._records)

    def expanded(self, records: NameLookupMapping[DimensionElement, Optional[DimensionRecord]]
                 ) -> DataCoordinate:
        # Docstring inherited from DataCoordinate.
        # Already expanded; the given records are ignored.
        return self

    def hasFull(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def hasRecords(self) -> bool:
        # Docstring inherited from DataCoordinate.
        return True

    def _record(self, name: str) -> Optional[DimensionRecord]:
        # Docstring inherited from DataCoordinate.
        return self._records[name]