Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22import unittest 

23import copy 

24from dataclasses import dataclass 

25import os 

26import pickle 

27from random import Random 

28import itertools 

29from typing import Iterator, Optional 

30 

31from lsst.daf.butler import ( 

32 TimespanDatabaseRepresentation, 

33 DataCoordinate, 

34 DataCoordinateSequence, 

35 DataCoordinateSet, 

36 Dimension, 

37 DimensionGraph, 

38 DimensionUniverse, 

39 NamedKeyDict, 

40 NamedValueSet, 

41 Registry, 

42 YamlRepoImportBackend, 

43) 

44from lsst.daf.butler.registry import RegistryConfig 

45 

# Path to the YAML dimension-data export used by the tests below, resolved
# relative to this test file's own directory.
DIMENSION_DATA_FILE = os.path.normpath(
    os.path.join(os.path.dirname(__file__), "data", "registry", "hsc-rc2-subset.yaml")
)

48 

49 

def loadDimensionData() -> DataCoordinateSequence:
    """Load dimension data from an export file included in the code repository.

    Returns
    -------
    dataIds : `DataCoordinateSet`
        A set containing all data IDs in the export file.
    """
    # Import the YAML export into a throwaway in-memory SQLite registry, then
    # read it back out as a sequence of expanded DataCoordinate objects.
    registryConfig = RegistryConfig()
    registryConfig["db"] = "sqlite://"
    scratchRegistry = Registry.createFromConfig(registryConfig)
    with open(DIMENSION_DATA_FILE, 'r') as exportStream:
        importer = YamlRepoImportBackend(exportStream, scratchRegistry)
        importer.register()
        importer.load(datastore=None)
    graph = DimensionGraph(scratchRegistry.dimensions,
                           names=["visit", "detector", "tract", "patch"])
    return scratchRegistry.queryDataIds(graph).expanded().toSequence()

69 

70 

class DimensionTestCase(unittest.TestCase):
    """Tests for dimensions.

    All tests here rely on the content of ``config/dimensions.yaml``, either
    to test that the definitions there are read in properly or just as generic
    data for testing various operations.
    """

    def setUp(self):
        # Fresh default universe for every test; per testPickling below,
        # equal universes are the same object within a process.
        self.universe = DimensionUniverse()

    def checkGraphInvariants(self, graph):
        """Assert invariants that must hold for any `DimensionGraph`.

        Parameters
        ----------
        graph : `DimensionGraph`
            Graph to check.
        """
        elements = list(graph.elements)
        for n, element in enumerate(elements):
            # Ordered comparisons on graphs behave like sets.
            self.assertLessEqual(element.graph, graph)
            # Ordered comparisons on elements correspond to the ordering
            # within a DimensionUniverse (topological, with deterministic
            # tiebreakers), so every earlier element compares less and every
            # later element compares greater.
            for other in elements[:n]:
                self.assertLess(other, element)
                self.assertLessEqual(other, element)
            for other in elements[n + 1:]:
                self.assertGreater(other, element)
                self.assertGreaterEqual(other, element)
            if isinstance(element, Dimension):
                self.assertEqual(element.graph.required, element.required)
        # A graph is fully determined by its required dimensions.
        self.assertEqual(DimensionGraph(self.universe, graph.required), graph)
        # "required" is exactly the set of dimensions not implied by any
        # element of the graph.
        self.assertCountEqual(graph.required,
                              [dimension for dimension in graph.dimensions
                               if not any(dimension in other.graph.implied for other in graph.elements)])
        self.assertCountEqual(graph.implied, graph.dimensions - graph.required)
        # "dimensions" is the Dimension subset of "elements", and the disjoint
        # union of "required" and "implied".
        self.assertCountEqual(graph.dimensions,
                              [element for element in graph.elements
                               if isinstance(element, Dimension)])
        self.assertCountEqual(graph.dimensions, itertools.chain(graph.required, graph.implied))
        # Check primary key traversal order: each element should follow any it
        # requires, and an element that is implied by any other in the graph
        # should follow at least one of those.
        seen = NamedValueSet()
        for element in graph.primaryKeyTraversalOrder:
            with self.subTest(required=graph.required, implied=graph.implied, element=element):
                seen.add(element)
                self.assertLessEqual(element.graph.required, seen)
                if element in graph.implied:
                    self.assertTrue(any(element in s.implied for s in seen))
        # The traversal must visit every element exactly once.
        self.assertCountEqual(seen, graph.elements)

    def testConfigRead(self):
        """Test that the expected static dimensions are read from the
        default configuration.
        """
        self.assertEqual(self.universe.getStaticDimensions().names,
                         {"instrument", "visit", "visit_system", "exposure", "detector",
                          "physical_filter", "band", "subfilter",
                          "skymap", "tract", "patch"} | {f"htm{level}" for level in range(25)})

    def testGraphs(self):
        """Test graph invariants on the empty graph and on the graph of
        every static element.
        """
        self.checkGraphInvariants(self.universe.empty)
        for element in self.universe.getStaticElements():
            self.checkGraphInvariants(element.graph)

    def testInstrumentDimensions(self):
        """Test expansion and classification of instrument-related
        dimensions.
        """
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit"))
        self.assertCountEqual(graph.dimensions.names,
                              ("instrument", "exposure", "detector",
                               "visit", "physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.required.names, ("instrument", "exposure", "detector", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))
        self.assertCountEqual(graph.governors.names, {"instrument"})

    def testCalibrationDimensions(self):
        """Test expansion and classification of the dimensions used to
        identify calibration data.
        """
        graph = DimensionGraph(self.universe, names=("physical_filter", "detector"))
        self.assertCountEqual(graph.dimensions.names,
                              ("instrument", "detector", "physical_filter", "band"))
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "physical_filter"))
        self.assertCountEqual(graph.implied.names, ("band",))
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)
        self.assertCountEqual(graph.governors.names, {"instrument"})

    def testObservationDimensions(self):
        """Test expansion, classification, and spatial/temporal families of
        observation dimensions.
        """
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit"))
        self.assertCountEqual(graph.dimensions.names, ("instrument", "detector", "visit", "exposure",
                                                       "physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "exposure", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))
        self.assertCountEqual(graph.spatial.names, ("observation_regions",))
        self.assertCountEqual(graph.temporal.names, ("observation_timespans",))
        self.assertCountEqual(graph.governors.names, {"instrument"})
        self.assertEqual(graph.spatial.names, {"observation_regions"})
        self.assertEqual(graph.temporal.names, {"observation_timespans"})
        # Both topological families are governed by the instrument dimension.
        self.assertEqual(next(iter(graph.spatial)).governor, self.universe["instrument"])
        self.assertEqual(next(iter(graph.temporal)).governor, self.universe["instrument"])

    def testSkyMapDimensions(self):
        """Test expansion, classification, and spatial family of skymap
        dimensions.
        """
        graph = DimensionGraph(self.universe, names=("patch",))
        self.assertCountEqual(graph.dimensions.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.required.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.implied.names, ())
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)
        self.assertCountEqual(graph.spatial.names, ("skymap_regions",))
        self.assertCountEqual(graph.governors.names, {"skymap"})
        self.assertEqual(graph.spatial.names, {"skymap_regions"})
        self.assertEqual(next(iter(graph.spatial)).governor, self.universe["skymap"])

    def testSubsetCalculation(self):
        """Test that independent spatial and temporal options are computed
        correctly.
        """
        graph = DimensionGraph(self.universe, names=("visit", "detector", "tract", "patch", "htm7",
                                                     "exposure"))
        self.assertCountEqual(graph.spatial.names,
                              ("observation_regions", "skymap_regions", "htm"))
        self.assertCountEqual(graph.temporal.names,
                              ("observation_timespans",))

    def testSchemaGeneration(self):
        """Test that the table specifications generated for static elements
        are self-consistent: key fields match the primary keys of the
        dimensions they reference, and foreign keys line up field-by-field
        with the tables they target.
        """
        tableSpecs = NamedKeyDict({})
        # Only elements with real (non-view) tables get specs.
        for element in self.universe.getStaticElements():
            if element.hasTable and element.viewOf is None:
                tableSpecs[element] = element.RecordClass.fields.makeTableSpec(
                    tsRepr=TimespanDatabaseRepresentation.Compound
                )
        for element, tableSpec in tableSpecs.items():
            for dep in element.required:
                with self.subTest(element=element.name, dep=dep.name):
                    if dep != element:
                        # Field for a required dependency mirrors that
                        # dimension's primary key exactly.
                        self.assertIn(dep.name, tableSpec.fields)
                        self.assertEqual(tableSpec.fields[dep.name].dtype, dep.primaryKey.dtype)
                        self.assertEqual(tableSpec.fields[dep.name].length, dep.primaryKey.length)
                        self.assertEqual(tableSpec.fields[dep.name].nbytes, dep.primaryKey.nbytes)
                        self.assertFalse(tableSpec.fields[dep.name].nullable)
                        self.assertTrue(tableSpec.fields[dep.name].primaryKey)
                    else:
                        # The element's own key field uses the primary key's
                        # name rather than the dimension name.
                        self.assertIn(element.primaryKey.name, tableSpec.fields)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].dtype,
                                         dep.primaryKey.dtype)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].length,
                                         dep.primaryKey.length)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].nbytes,
                                         dep.primaryKey.nbytes)
                        self.assertFalse(tableSpec.fields[element.primaryKey.name].nullable)
                        self.assertTrue(tableSpec.fields[element.primaryKey.name].primaryKey)
            for dep in element.implied:
                with self.subTest(element=element.name, dep=dep.name):
                    # Implied dimensions appear as non-key fields.
                    self.assertIn(dep.name, tableSpec.fields)
                    self.assertEqual(tableSpec.fields[dep.name].dtype, dep.primaryKey.dtype)
                    self.assertFalse(tableSpec.fields[dep.name].primaryKey)
            for foreignKey in tableSpec.foreignKeys:
                self.assertIn(foreignKey.table, tableSpecs)
                self.assertIn(foreignKey.table, element.graph.dimensions.names)
                self.assertEqual(len(foreignKey.source), len(foreignKey.target))
                for source, target in zip(foreignKey.source, foreignKey.target):
                    # Source/target field pairs must agree in type and size.
                    self.assertIn(source, tableSpec.fields.names)
                    self.assertIn(target, tableSpecs[foreignKey.table].fields.names)
                    self.assertEqual(tableSpec.fields[source].dtype,
                                     tableSpecs[foreignKey.table].fields[target].dtype)
                    self.assertEqual(tableSpec.fields[source].length,
                                     tableSpecs[foreignKey.table].fields[target].length)
                    self.assertEqual(tableSpec.fields[source].nbytes,
                                     tableSpecs[foreignKey.table].fields[target].nbytes)

    def testPickling(self):
        """Test that universes, elements, and graphs round-trip through
        pickling and copying as the identical objects.
        """
        # Pickling and copying should always yield the exact same object within
        # a single process (cross-process is impossible to test here).
        universe1 = DimensionUniverse()
        universe2 = pickle.loads(pickle.dumps(universe1))
        universe3 = copy.copy(universe1)
        universe4 = copy.deepcopy(universe1)
        self.assertIs(universe1, universe2)
        self.assertIs(universe1, universe3)
        self.assertIs(universe1, universe4)
        for element1 in universe1.getStaticElements():
            element2 = pickle.loads(pickle.dumps(element1))
            self.assertIs(element1, element2)
            graph1 = element1.graph
            graph2 = pickle.loads(pickle.dumps(graph1))
            self.assertIs(graph1, graph2)

250 

251 

@dataclass
class SplitByStateFlags:
    """A struct that separates data IDs with different states but the same
    values.
    """

    # Data IDs that only contain values for required dimensions.
    # `DataCoordinateSequence.hasFull()` returns `True` for this attribute if
    # and only if ``minimal.graph.implied`` has no elements, and
    # `DataCoordinate.hasRecords()` always returns `False`.
    minimal: Optional[DataCoordinateSequence] = None

    # Data IDs that contain values for all dimensions.
    # `DataCoordinateSequence.hasFull()` always returns `True` while
    # `DataCoordinate.hasRecords()` always returns `False` for this attribute.
    complete: Optional[DataCoordinateSequence] = None

    # Data IDs that contain values for all dimensions as well as records.
    # Both `DataCoordinateSequence.hasFull()` and
    # `DataCoordinate.hasRecords()` always return `True` for this attribute.
    expanded: Optional[DataCoordinateSequence] = None

    def chain(self, n: Optional[int] = None) -> Iterator:
        """Iterate over the data IDs of different types.

        Parameters
        ----------
        n : `int`, optional
            If provided (`None` is default), iterate over only the ``nth``
            data ID in each attribute.

        Yields
        ------
        dataId : `DataCoordinate`
            A data ID from one of the attributes in this struct.
        """
        # Either the whole sequence or a one-element window at index n.
        window = slice(None, None) if n is None else slice(n, n + 1)
        for sequence in (self.minimal, self.complete, self.expanded):
            if sequence is not None:
                yield from sequence[window]

304 

305 

class DataCoordinateTestCase(unittest.TestCase):
    """Tests for `DataCoordinate` and its container classes, driven by data
    IDs loaded from the repository's test-data export file.
    """

    # Fixed seed so failures are reproducible across runs.
    RANDOM_SEED = 10

    @classmethod
    def setUpClass(cls):
        # Importing the export file is comparatively slow; do it once for the
        # whole test case.
        cls.allDataIds = loadDimensionData()

    def setUp(self):
        # Re-seed per test so tests are independent of execution order.
        self.rng = Random(self.RANDOM_SEED)

    def randomDataIds(self, n: int, dataIds: Optional[DataCoordinateSequence] = None):
        """Select random data IDs from those loaded from test data.

        Parameters
        ----------
        n : `int`
            Number of data IDs to select.
        dataIds : `DataCoordinateSequence`, optional
            Data IDs to select from.  Defaults to ``self.allDataIds``.

        Returns
        -------
        selected : `DataCoordinateSequence`
            ``n`` data IDs randomly selected from ``dataIds`` without
            replacement (``Random.sample`` never repeats an element).
        """
        if dataIds is None:
            dataIds = self.allDataIds
        # The selection has exactly the same state flags as its source, so
        # skip the (slower) consistency check.
        return DataCoordinateSequence(self.rng.sample(dataIds, n),
                                      graph=dataIds.graph,
                                      hasFull=dataIds.hasFull(),
                                      hasRecords=dataIds.hasRecords(),
                                      check=False)

    def randomDimensionSubset(self, n: int = 3, graph: Optional[DimensionGraph] = None) -> DimensionGraph:
        """Generate a random `DimensionGraph` that has a subset of the
        dimensions in a given one.

        Parameters
        ----------
        n : `int`
            Number of dimensions to select, before automatic expansion by
            `DimensionGraph`.
        graph : `DimensionGraph`, optional
            Dimensions to select from.  Defaults to
            ``self.allDataIds.graph``.

        Returns
        -------
        selected : `DimensionGraph`
            ``n`` or more dimensions randomly selected from ``graph`` without
            replacement.
        """
        if graph is None:
            graph = self.allDataIds.graph
        # Clamp the sample size with min(): Random.sample raises ValueError
        # when asked for more items than the population holds, and the
        # previous max() call always selected *every* dimension, so the
        # "subset" was never actually a proper subset.
        return DimensionGraph(
            graph.universe,
            names=self.rng.sample(list(graph.dimensions.names), min(n, len(graph.dimensions)))
        )

    def splitByStateFlags(self, dataIds: Optional[DataCoordinateSequence] = None, *,
                          expanded: bool = True,
                          complete: bool = True,
                          minimal: bool = True) -> SplitByStateFlags:
        """Given a sequence of data IDs, generate new equivalent sequences
        containing less information.

        Parameters
        ----------
        dataIds : `DataCoordinateSequence`, optional.
            Data IDs to start from.  Defaults to ``self.allDataIds``.
            ``dataIds.hasRecords()`` and ``dataIds.hasFull()`` must both
            return `True`.
        expanded : `bool`, optional
            If `True` (default) include the original data IDs that contain
            all information in the result.
        complete : `bool`, optional
            If `True` (default) include data IDs for which ``hasFull()``
            returns `True` but ``hasRecords()`` does not.
        minimal : `bool`, optional
            If `True` (default) include data IDs that only contain values for
            required dimensions, for which ``hasFull()`` may not return
            `True`.

        Returns
        -------
        split : `SplitByStateFlags`
            A dataclass holding the indicated data IDs in attributes that
            correspond to the boolean keyword arguments.
        """
        if dataIds is None:
            dataIds = self.allDataIds
        assert dataIds.hasFull() and dataIds.hasRecords()
        result = SplitByStateFlags(expanded=dataIds)
        if complete:
            # Rebuild from full key-value mappings: values for all
            # dimensions, but no records.
            result.complete = DataCoordinateSequence(
                [DataCoordinate.standardize(e.full.byName(), graph=dataIds.graph) for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertTrue(result.complete.hasFull())
            self.assertFalse(result.complete.hasRecords())
        if minimal:
            # Rebuild from required-only mappings: hasFull() only if there
            # are no implied dimensions to be missing.
            result.minimal = DataCoordinateSequence(
                [DataCoordinate.standardize(e.byName(), graph=dataIds.graph) for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertEqual(result.minimal.hasFull(), not dataIds.graph.implied)
            self.assertFalse(result.minimal.hasRecords())
        if not expanded:
            result.expanded = None
        return result

    def testMappingInterface(self):
        """Test that the mapping interface in `DataCoordinate` and (when
        applicable) its ``full`` property are self-consistent and consistent
        with the ``graph`` property.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for dataId in split.chain():
                with self.subTest(dataId=dataId):
                    # Lookup by Dimension object and by name must agree.
                    self.assertEqual(list(dataId.values()), [dataId[d] for d in dataId.keys()])
                    self.assertEqual(list(dataId.values()), [dataId[d.name] for d in dataId.keys()])
                    self.assertEqual(dataId.keys(), dataId.graph.required)
            for dataId in itertools.chain(split.complete, split.expanded):
                with self.subTest(dataId=dataId):
                    self.assertTrue(dataId.hasFull())
                    self.assertEqual(dataId.graph.dimensions, dataId.full.keys())
                    self.assertEqual(list(dataId.full.values()), [dataId[k] for k in dataId.graph.dimensions])

    def testEquality(self):
        """Test that different `DataCoordinate` instances with different state
        flags can be compared with each other and other mappings.
        """
        dataIds = self.randomDataIds(n=2)
        split = self.splitByStateFlags(dataIds)
        # Iterate over all combinations of different states of DataCoordinate,
        # with the same underlying data ID values.
        for a0, b0 in itertools.combinations(split.chain(0), 2):
            self.assertEqual(a0, b0)
            self.assertEqual(a0, b0.byName())
            self.assertEqual(a0.byName(), b0)
        # Same thing, for a different data ID value.
        for a1, b1 in itertools.combinations(split.chain(1), 2):
            self.assertEqual(a1, b1)
            self.assertEqual(a1, b1.byName())
            self.assertEqual(a1.byName(), b1)
        # Iterate over all combinations of different states of DataCoordinate,
        # with different underlying data ID values.  (The previous version of
        # this loop asserted against stale ``a1``/``b0`` variables left over
        # from the loops above; use the loop's own variables in both orders
        # instead.)
        for a0, b1 in itertools.product(split.chain(0), split.chain(1)):
            self.assertNotEqual(a0, b1)
            self.assertNotEqual(b1, a0)
            self.assertNotEqual(a0, b1.byName())
            self.assertNotEqual(a0.byName(), b1)
            self.assertNotEqual(b1, a0.byName())
            self.assertNotEqual(b1.byName(), a0)

    def testStandardize(self):
        """Test constructing a DataCoordinate from many different kinds of
        input via `DataCoordinate.standardize` and `DataCoordinate.subset`.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for dataId in split.chain():
                # Passing in any kind of DataCoordinate alone just returns
                # that object.
                self.assertIs(dataId, DataCoordinate.standardize(dataId))
                # Same if we also explicitly pass the dimensions we want.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph))
                # Same if we pass the dimensions and some irrelevant
                # kwargs.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph, htm7=12))
                # Test constructing a new data ID from this one with a
                # subset of the dimensions.
                # This is not possible for some combinations of
                # dimensions if hasFull is False (see
                # `DataCoordinate.subset` docs).
                newDimensions = self.randomDimensionSubset(n=1, graph=dataId.graph)
                if dataId.hasFull() or dataId.graph.required.issuperset(newDimensions.required):
                    newDataIds = [
                        dataId.subset(newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions, htm7=12),
                    ]
                    for newDataId in newDataIds:
                        with self.subTest(newDataId=newDataId, type=type(dataId)):
                            commonKeys = dataId.keys() & newDataId.keys()
                            self.assertTrue(commonKeys)
                            self.assertEqual(
                                [newDataId[k] for k in commonKeys],
                                [dataId[k] for k in commonKeys],
                            )
                            # This should never "downgrade" from
                            # Complete to Minimal or Expanded to Complete.
                            if dataId.hasRecords():
                                self.assertTrue(newDataId.hasRecords())
                            if dataId.hasFull():
                                self.assertTrue(newDataId.hasFull())
            # Start from a complete data ID, and pass its values in via
            # several different ways that should be equivalent.
            for dataId in split.complete:
                # Split the keys (dimension names) into two random subsets, so
                # we can pass some as kwargs below.
                keys1 = set(self.rng.sample(list(dataId.graph.dimensions.names),
                                            len(dataId.graph.dimensions)//2))
                keys2 = dataId.graph.dimensions.names - keys1
                newCompleteDataIds = [
                    DataCoordinate.standardize(dataId.full.byName(), universe=dataId.universe),
                    DataCoordinate.standardize(dataId.full.byName(), graph=dataId.graph),
                    DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                               **dataId.full.byName()),
                    DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                               graph=dataId.graph, **dataId.full.byName()),
                    DataCoordinate.standardize(**dataId.full.byName(), universe=dataId.universe),
                    DataCoordinate.standardize(graph=dataId.graph, **dataId.full.byName()),
                    DataCoordinate.standardize(
                        {k: dataId[k] for k in keys1},
                        universe=dataId.universe,
                        **{k: dataId[k] for k in keys2}
                    ),
                    DataCoordinate.standardize(
                        {k: dataId[k] for k in keys1},
                        graph=dataId.graph,
                        **{k: dataId[k] for k in keys2}
                    ),
                ]
                for newDataId in newCompleteDataIds:
                    with self.subTest(dataId=dataId, newDataId=newDataId, type=type(dataId)):
                        self.assertEqual(dataId, newDataId)
                        self.assertTrue(newDataId.hasFull())

    def testRegions(self):
        """Test that data IDs for a few known dimensions have the expected
        regions.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"observation_regions"})
            self.assertEqual(dataId.region, dataId.records["visit"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit", "detector"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"observation_regions"})
            self.assertEqual(dataId.region, dataId.records["visit_detector_region"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["tract"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"skymap_regions"})
            self.assertEqual(dataId.region, dataId.records["tract"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["patch"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"skymap_regions"})
            self.assertEqual(dataId.region, dataId.records["patch"].region)

    def testTimespans(self):
        """Test that data IDs for a few known dimensions have the expected
        timespans.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.timespan)
            self.assertEqual(dataId.graph.temporal.names, {"observation_timespans"})
            self.assertEqual(dataId.timespan, dataId.records["visit"].timespan)

    def testIterableStatusFlags(self):
        """Test that DataCoordinateSet and DataCoordinateSequence compute
        their hasFull and hasRecords flags correctly from their elements.
        """
        dataIds = self.randomDataIds(n=10)
        split = self.splitByStateFlags(dataIds)
        for cls in (DataCoordinateSet, DataCoordinateSequence):
            # Expanded data IDs have everything, with or without checking.
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasRecords())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasRecords())
            # Complete data IDs have values but not records.
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=False).hasFull())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.complete, graph=dataIds.graph, hasRecords=True, check=True)
            # Minimal data IDs are full only when nothing is implied.
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=True).hasFull(),
                             not dataIds.graph.implied)
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=False).hasFull(),
                             not dataIds.graph.implied)
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.minimal, graph=dataIds.graph, hasRecords=True, check=True)
            if dataIds.graph.implied:
                with self.assertRaises(ValueError):
                    cls(split.minimal, graph=dataIds.graph, hasFull=True, check=True)

    def testSetOperations(self):
        """Test for self-consistency across DataCoordinateSet's operations.
        """
        c = self.randomDataIds(n=10).toSet()
        a = self.randomDataIds(n=20).toSet() | c
        b = self.randomDataIds(n=20).toSet() | c
        # Make sure we don't have a particularly unlucky random seed, since
        # that would make a lot of this test uninteresting.
        self.assertNotEqual(a, b)
        self.assertGreater(len(a), 0)
        self.assertGreater(len(b), 0)
        # The rest of the tests should not depend on the random seed.
        self.assertEqual(a, a)
        self.assertNotEqual(a, a.toSequence())
        self.assertEqual(a, a.toSequence().toSet())
        self.assertEqual(b, b)
        self.assertNotEqual(b, b.toSequence())
        self.assertEqual(b, b.toSequence().toSet())
        # Operators must agree with their named-method equivalents and obey
        # the usual set-algebra ordering laws.
        self.assertEqual(a & b, a.intersection(b))
        self.assertLessEqual(a & b, a)
        self.assertLessEqual(a & b, b)
        self.assertEqual(a | b, a.union(b))
        self.assertGreaterEqual(a | b, a)
        self.assertGreaterEqual(a | b, b)
        self.assertEqual(a - b, a.difference(b))
        self.assertLessEqual(a - b, a)
        self.assertLessEqual(b - a, b)
        self.assertEqual(a ^ b, a.symmetric_difference(b))
        self.assertGreaterEqual(a ^ b, (a | b) - (a & b))

633 

634 

# Allow the test module to be run directly as a script.
if __name__ == "__main__":
    unittest.main()