Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22import unittest 

23import copy 

24from dataclasses import dataclass 

25import os 

26import pickle 

27from random import Random 

28import itertools 

29from typing import Iterator, Optional 

30 

31from lsst.daf.butler import ( 

32 DatabaseTimespanRepresentation, 

33 DataCoordinate, 

34 DataCoordinateSequence, 

35 DataCoordinateSet, 

36 Dimension, 

37 DimensionGraph, 

38 DimensionUniverse, 

39 NamedKeyDict, 

40 NamedValueSet, 

41 Registry, 

42 YamlRepoImportBackend, 

43) 

44from lsst.daf.butler.registry import RegistryConfig 

45 

# Normalized path to the YAML dimension-record export that ships with the
# test data (imported into an in-memory registry by `loadDimensionData`).
_TESTS_DIR = os.path.dirname(__file__)
DIMENSION_DATA_FILE = os.path.normpath(
    os.path.join(_TESTS_DIR, "data", "registry", "hsc-rc2-subset.yaml")
)

48 

49 

def loadDimensionData() -> DataCoordinateSequence:
    """Load dimension data from an export file included in the code repository.

    Returns
    -------
    dataIds : `DataCoordinateSequence`
        A sequence containing all (expanded) data IDs in the export file for
        the visit, detector, tract, and patch dimensions.
    """
    # Stand up a throwaway in-memory SQLite registry purely as a vehicle for
    # importing the YAML data and retrieving it as DataCoordinate objects.
    registryConfig = RegistryConfig()
    registryConfig["db"] = "sqlite://"
    registry = Registry.fromConfig(registryConfig, create=True)
    with open(DIMENSION_DATA_FILE, 'r') as stream:
        importer = YamlRepoImportBackend(stream, registry)
        importer.register()
        # No datastore involved here; we only need the dimension records.
        importer.load(datastore=None)
    dimensions = DimensionGraph(registry.dimensions, names=["visit", "detector", "tract", "patch"])
    return registry.queryDataIds(dimensions).expanded().toSequence()

69 

70 

class DimensionTestCase(unittest.TestCase):
    """Tests for dimensions.

    All tests here rely on the content of ``config/dimensions.yaml``, either
    to test that the definitions there are read in properly or just as generic
    data for testing various operations.
    """

    def setUp(self):
        # Construct the default universe; the tests below assume the standard
        # dimension configuration shipped with the package.
        self.universe = DimensionUniverse()

    def checkGraphInvariants(self, graph):
        """Assert a battery of self-consistency invariants on ``graph``.

        Covers ordering of elements, the required/implied partition of the
        dimensions, primary key traversal order, and byte encode/decode
        round-tripping.
        """
        elements = list(graph.elements)
        for n, element in enumerate(elements):
            # Ordered comparisons on graphs behave like sets.
            self.assertLessEqual(element.graph, graph)
            # Ordered comparisons on elements correspond to the ordering within
            # a DimensionUniverse (topological, with deterministic
            # tiebreakers).
            for other in elements[:n]:
                self.assertLess(other, element)
                self.assertLessEqual(other, element)
            for other in elements[n + 1:]:
                self.assertGreater(other, element)
                self.assertGreaterEqual(other, element)
            if isinstance(element, Dimension):
                self.assertEqual(element.graph.required, element.required)
        # Reconstructing a graph from just its required dimensions must give
        # the same graph back (the rest is derived automatically).
        self.assertEqual(DimensionGraph(self.universe, graph.required), graph)
        # A dimension is "required" exactly when no element in the graph
        # implies it.
        self.assertCountEqual(graph.required,
                              [dimension for dimension in graph.dimensions
                               if not any(dimension in other.graph.implied for other in graph.elements)])
        self.assertCountEqual(graph.implied, graph.dimensions - graph.required)
        # graph.dimensions is exactly the subset of graph.elements that are
        # true Dimensions (as opposed to e.g. join/region elements).
        self.assertCountEqual(graph.dimensions,
                              [element for element in graph.elements
                               if isinstance(element, Dimension)])
        self.assertCountEqual(graph.dimensions, itertools.chain(graph.required, graph.implied))
        # Check primary key traversal order: each element should follow any it
        # requires, and element that is implied by any other in the graph
        # follow at least one of those.
        seen = NamedValueSet()
        for element in graph.primaryKeyTraversalOrder:
            with self.subTest(required=graph.required, implied=graph.implied, element=element):
                seen.add(element)
                self.assertLessEqual(element.graph.required, seen)
                if element in graph.implied:
                    self.assertTrue(any(element in s.implied for s in seen))
        # Traversal must visit every element exactly once.
        self.assertCountEqual(seen, graph.elements)
        # Test encoding and decoding of DimensionGraphs to bytes.
        encoded = graph.encode()
        self.assertEqual(len(encoded), self.universe.getEncodeLength())
        self.assertEqual(DimensionGraph.decode(encoded, universe=self.universe), graph)

    def testConfigRead(self):
        """Test that the default config defines the expected set of static
        dimensions (including one htm dimension per level 0-24).
        """
        self.assertEqual(self.universe.getStaticDimensions().names,
                         {"instrument", "visit", "visit_system", "exposure", "detector",
                          "physical_filter", "band", "subfilter",
                          "skymap", "tract", "patch"} | {f"htm{level}" for level in range(25)})

    def testGraphs(self):
        """Check graph invariants for the empty graph and the graph of every
        static element in the universe.
        """
        self.checkGraphInvariants(self.universe.empty)
        for element in self.universe.getStaticElements():
            self.checkGraphInvariants(element.graph)

    def testInstrumentDimensions(self):
        """Test the expanded graph contents for instrument-centric dimensions
        (exposure/detector/visit).
        """
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit"))
        self.assertCountEqual(graph.dimensions.names,
                              ("instrument", "exposure", "detector",
                               "visit", "physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.required.names, ("instrument", "exposure", "detector", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))

    def testCalibrationDimensions(self):
        """Test the expanded graph contents for calibration-style dimensions
        (physical_filter/detector).
        """
        graph = DimensionGraph(self.universe, names=("physical_filter", "detector"))
        self.assertCountEqual(graph.dimensions.names,
                              ("instrument", "detector", "physical_filter", "band"))
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "physical_filter"))
        self.assertCountEqual(graph.implied.names, ("band",))
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)

    def testObservationDimensions(self):
        """Test the expanded graph contents for observation dimensions,
        including their spatial and temporal elements.
        """
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit"))
        self.assertCountEqual(graph.dimensions.names, ("instrument", "detector", "visit", "exposure",
                                                       "physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "exposure", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))
        self.assertCountEqual(graph.spatial.names, ("visit_detector_region",))
        self.assertCountEqual(graph.temporal.names, ("exposure",))

    def testSkyMapDimensions(self):
        """Test the expanded graph contents for skymap dimensions (patch)."""
        graph = DimensionGraph(self.universe, names=("patch",))
        self.assertCountEqual(graph.dimensions.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.required.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.implied.names, ())
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)
        self.assertCountEqual(graph.spatial.names, ("patch",))

    def testSubsetCalculation(self):
        """Test that independent spatial and temporal options are computed
        correctly.
        """
        graph = DimensionGraph(self.universe, names=("visit", "detector", "tract", "patch", "htm7",
                                                     "exposure"))
        self.assertCountEqual(graph.spatial.names,
                              ("visit_detector_region", "patch", "htm7"))
        self.assertCountEqual(graph.temporal.names,
                              ("exposure",))

    def testSchemaGeneration(self):
        """Test that generated table specs for all static elements are
        consistent with the elements' primary keys and with each other's
        foreign keys.
        """
        tableSpecs = NamedKeyDict({})
        # Build a table spec for every element that actually has its own table
        # (i.e. is not a view of another element's table).
        for element in self.universe.getStaticElements():
            if element.hasTable and element.viewOf is None:
                tableSpecs[element] = element.RecordClass.fields.makeTableSpec(
                    tsRepr=DatabaseTimespanRepresentation.Compound
                )
        for element, tableSpec in tableSpecs.items():
            for dep in element.required:
                with self.subTest(element=element.name, dep=dep.name):
                    if dep != element:
                        # Required dependency: field named after the dependency
                        # must mirror its primary key and be part of this
                        # table's primary key.
                        self.assertIn(dep.name, tableSpec.fields)
                        self.assertEqual(tableSpec.fields[dep.name].dtype, dep.primaryKey.dtype)
                        self.assertEqual(tableSpec.fields[dep.name].length, dep.primaryKey.length)
                        self.assertEqual(tableSpec.fields[dep.name].nbytes, dep.primaryKey.nbytes)
                        self.assertFalse(tableSpec.fields[dep.name].nullable)
                        self.assertTrue(tableSpec.fields[dep.name].primaryKey)
                    else:
                        # The element depends on itself: check its own primary
                        # key field instead.
                        self.assertIn(element.primaryKey.name, tableSpec.fields)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].dtype,
                                         dep.primaryKey.dtype)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].length,
                                         dep.primaryKey.length)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].nbytes,
                                         dep.primaryKey.nbytes)
                        self.assertFalse(tableSpec.fields[element.primaryKey.name].nullable)
                        self.assertTrue(tableSpec.fields[element.primaryKey.name].primaryKey)
            for dep in element.implied:
                with self.subTest(element=element.name, dep=dep.name):
                    # Implied dependencies get a matching field that is NOT
                    # part of the primary key.
                    self.assertIn(dep.name, tableSpec.fields)
                    self.assertEqual(tableSpec.fields[dep.name].dtype, dep.primaryKey.dtype)
                    self.assertFalse(tableSpec.fields[dep.name].primaryKey)
            for foreignKey in tableSpec.foreignKeys:
                # Every foreign key must target another element's table and
                # pair up source/target fields with identical storage types.
                self.assertIn(foreignKey.table, tableSpecs)
                self.assertIn(foreignKey.table, element.graph.dimensions.names)
                self.assertEqual(len(foreignKey.source), len(foreignKey.target))
                for source, target in zip(foreignKey.source, foreignKey.target):
                    self.assertIn(source, tableSpec.fields.names)
                    self.assertIn(target, tableSpecs[foreignKey.table].fields.names)
                    self.assertEqual(tableSpec.fields[source].dtype,
                                     tableSpecs[foreignKey.table].fields[target].dtype)
                    self.assertEqual(tableSpec.fields[source].length,
                                     tableSpecs[foreignKey.table].fields[target].length)
                    self.assertEqual(tableSpec.fields[source].nbytes,
                                     tableSpecs[foreignKey.table].fields[target].nbytes)

    def testPickling(self):
        """Test that pickling and copying universes, elements, and graphs
        return the identical singleton objects within this process.
        """
        # Pickling and copying should always yield the exact same object within
        # a single process (cross-process is impossible to test here).
        universe1 = DimensionUniverse()
        universe2 = pickle.loads(pickle.dumps(universe1))
        universe3 = copy.copy(universe1)
        universe4 = copy.deepcopy(universe1)
        self.assertIs(universe1, universe2)
        self.assertIs(universe1, universe3)
        self.assertIs(universe1, universe4)
        for element1 in universe1.getStaticElements():
            element2 = pickle.loads(pickle.dumps(element1))
            self.assertIs(element1, element2)
            graph1 = element1.graph
            graph2 = pickle.loads(pickle.dumps(graph1))
            self.assertIs(graph1, graph2)

244 

245 

@dataclass
class SplitByStateFlags:
    """A struct that separates data IDs with different states but the same
    values.
    """

    minimal: Optional[DataCoordinateSequence] = None
    """Data IDs that only contain values for required dimensions.

    `DataCoordinateSequence.hasFull()` will return `True` for this if and only
    if ``minimal.graph.implied`` has no elements.
    `DataCoordinate.hasRecords()` will always return `False`.
    """

    complete: Optional[DataCoordinateSequence] = None
    """Data IDs that contain values for all dimensions.

    `DataCoordinateSequence.hasFull()` will always return `True` and
    `DataCoordinate.hasRecords()` will always return `False` for this
    attribute.
    """

    expanded: Optional[DataCoordinateSequence] = None
    """Data IDs that contain values for all dimensions as well as records.

    `DataCoordinateSequence.hasFull()` and `DataCoordinate.hasRecords()` will
    always return `True` for this attribute.
    """

    def chain(self, n: Optional[int] = None) -> Iterator:
        """Iterate over the data IDs of different types.

        Parameters
        ----------
        n : `int`, optional
            If provided (`None` is default), iterate over only the ``nth``
            data ID in each attribute.

        Yields
        ------
        dataId : `DataCoordinate`
            A data ID from one of the attributes in this struct.
        """
        # Either every element, or just the single element at index n.
        selection = slice(None) if n is None else slice(n, n + 1)
        for attribute in (self.minimal, self.complete, self.expanded):
            if attribute is not None:
                yield from attribute[selection]

298 

299 

class DataCoordinateTestCase(unittest.TestCase):
    """Tests for `DataCoordinate` and its container classes, driven by the
    data IDs loaded from the test-data export file.
    """

    # Fixed seed so random selections below are deterministic per test.
    RANDOM_SEED = 10

    @classmethod
    def setUpClass(cls):
        cls.allDataIds = loadDimensionData()

    def setUp(self):
        # Re-seed for each test so tests are independent of execution order.
        self.rng = Random(self.RANDOM_SEED)

    def randomDataIds(self, n: int, dataIds: Optional[DataCoordinateSequence] = None):
        """Select random data IDs from those loaded from test data.

        Parameters
        ----------
        n : `int`
            Number of data IDs to select.
        dataIds : `DataCoordinateSequence`, optional
            Data IDs to select from.  Defaults to ``self.allDataIds``.

        Returns
        -------
        selected : `DataCoordinateSequence`
            ``n`` Data IDs randomly selected from ``dataIds`` without
            replacement.
        """
        if dataIds is None:
            dataIds = self.allDataIds
        return DataCoordinateSequence(self.rng.sample(dataIds, n),
                                      graph=dataIds.graph,
                                      hasFull=dataIds.hasFull(),
                                      hasRecords=dataIds.hasRecords(),
                                      check=False)

    def randomDimensionSubset(self, n: int = 3, graph: Optional[DimensionGraph] = None) -> DimensionGraph:
        """Generate a random `DimensionGraph` that has a subset of the
        dimensions in a given one.

        Parameters
        ----------
        n : `int`
            Number of dimensions to select, before automatic expansion by
            `DimensionGraph`.
        graph : `DimensionGraph`, optional
            Dimensions to select from.  Defaults to ``self.allDataIds.graph``.

        Returns
        -------
        selected : `DimensionGraph`
            ``n`` or more dimensions randomly selected from ``graph`` without
            replacement (more if expansion pulls in dependencies).
        """
        if graph is None:
            graph = self.allDataIds.graph
        # Use min() to cap the sample size at the population size: random
        # sampling is without replacement, so asking for more than
        # len(graph.dimensions) raises ValueError, and using max() here would
        # otherwise always select *every* dimension, defeating the purpose of
        # this helper.
        return DimensionGraph(
            graph.universe,
            names=self.rng.sample(list(graph.dimensions.names), min(n, len(graph.dimensions)))
        )

    def splitByStateFlags(self, dataIds: Optional[DataCoordinateSequence] = None, *,
                          expanded: bool = True,
                          complete: bool = True,
                          minimal: bool = True) -> SplitByStateFlags:
        """Given a sequence of data IDs, generate new equivalent sequences
        containing less information.

        Parameters
        ----------
        dataIds : `DataCoordinateSequence`, optional
            Data IDs to start from.  Defaults to ``self.allDataIds``.
            ``dataIds.hasRecords()`` and ``dataIds.hasFull()`` must both return
            `True`.
        expanded : `bool`, optional
            If `True` (default) include the original data IDs that contain all
            information in the result.
        complete : `bool`, optional
            If `True` (default) include data IDs for which ``hasFull()``
            returns `True` but ``hasRecords()`` does not.
        minimal : `bool`, optional
            If `True` (default) include data IDs that only contain values for
            required dimensions, for which ``hasFull()`` may not return `True`.

        Returns
        -------
        split : `SplitByStateFlags`
            A dataclass holding the indicated data IDs in attributes that
            correspond to the boolean keyword arguments.
        """
        if dataIds is None:
            dataIds = self.allDataIds
        assert dataIds.hasFull() and dataIds.hasRecords()
        result = SplitByStateFlags(expanded=dataIds)
        if complete:
            # Strip records but keep all dimension values.
            result.complete = DataCoordinateSequence(
                [DataCoordinate.standardize(e.full.byName(), graph=dataIds.graph) for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertTrue(result.complete.hasFull())
            self.assertFalse(result.complete.hasRecords())
        if minimal:
            # Keep only required-dimension values.
            result.minimal = DataCoordinateSequence(
                [DataCoordinate.standardize(e.byName(), graph=dataIds.graph) for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertEqual(result.minimal.hasFull(), not dataIds.graph.implied)
            self.assertFalse(result.minimal.hasRecords())
        if not expanded:
            result.expanded = None
        return result

    def testMappingInterface(self):
        """Test that the mapping interface in `DataCoordinate` and (when
        applicable) its ``full`` property are self-consistent and consistent
        with the ``graph`` property.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for dataId in split.chain():
                with self.subTest(dataId=dataId):
                    # Values must be addressable both by Dimension object and
                    # by dimension name.
                    self.assertEqual(list(dataId.values()), [dataId[d] for d in dataId.keys()])
                    self.assertEqual(list(dataId.values()), [dataId[d.name] for d in dataId.keys()])
                    self.assertEqual(dataId.keys(), dataId.graph.required)
            for dataId in itertools.chain(split.complete, split.expanded):
                with self.subTest(dataId=dataId):
                    self.assertTrue(dataId.hasFull())
                    self.assertEqual(dataId.graph.dimensions, dataId.full.keys())
                    self.assertEqual(list(dataId.full.values()), [dataId[k] for k in dataId.graph.dimensions])

    def testEquality(self):
        """Test that different `DataCoordinate` instances with different state
        flags can be compared with each other and other mappings.
        """
        dataIds = self.randomDataIds(n=2)
        split = self.splitByStateFlags(dataIds)
        # Iterate over all combinations of different states of DataCoordinate,
        # with the same underlying data ID values.
        for a0, b0 in itertools.combinations(split.chain(0), 2):
            self.assertEqual(a0, b0)
            self.assertEqual(a0, b0.byName())
            self.assertEqual(a0.byName(), b0)
        # Same thing, for a different data ID value.
        for a1, b1 in itertools.combinations(split.chain(1), 2):
            self.assertEqual(a1, b1)
            self.assertEqual(a1, b1.byName())
            self.assertEqual(a1.byName(), b1)
        # Iterate over all combinations of different states of DataCoordinate,
        # with different underlying data ID values.
        # NOTE(review): a1 and b0 below are the last values bound by the two
        # loops above; they hold data ID value 1 and value 0 respectively, so
        # these assertions still compare different values.
        for a0, b1 in itertools.product(split.chain(0), split.chain(1)):
            self.assertNotEqual(a0, b1)
            self.assertNotEqual(a1, b0)
            self.assertNotEqual(a0, b1.byName())
            self.assertNotEqual(a0.byName(), b1)
            self.assertNotEqual(a1, b0.byName())
            self.assertNotEqual(a1.byName(), b0)

    def testStandardize(self):
        """Test constructing a DataCoordinate from many different kinds of
        input via `DataCoordinate.standardize` and `DataCoordinate.subset`.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for dataId in split.chain():
                # Passing in any kind of DataCoordinate alone just returns
                # that object.
                self.assertIs(dataId, DataCoordinate.standardize(dataId))
                # Same if we also explicitly pass the dimensions we want.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph))
                # Same if we pass the dimensions and some irrelevant
                # kwargs.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph, htm7=12))
                # Test constructing a new data ID from this one with a
                # subset of the dimensions.
                # This is not possible for some combinations of
                # dimensions if hasFull is False (see
                # `DataCoordinate.subset` docs).
                newDimensions = self.randomDimensionSubset(n=1, graph=dataId.graph)
                if dataId.hasFull() or dataId.graph.required.issuperset(newDimensions.required):
                    newDataIds = [
                        dataId.subset(newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions, htm7=12),
                    ]
                    for newDataId in newDataIds:
                        with self.subTest(newDataId=newDataId, type=type(dataId)):
                            commonKeys = dataId.keys() & newDataId.keys()
                            self.assertTrue(commonKeys)
                            self.assertEqual(
                                [newDataId[k] for k in commonKeys],
                                [dataId[k] for k in commonKeys],
                            )
                            # This should never "downgrade" from
                            # Complete to Minimal or Expanded to Complete.
                            if dataId.hasRecords():
                                self.assertTrue(newDataId.hasRecords())
                            if dataId.hasFull():
                                self.assertTrue(newDataId.hasFull())
            # Start from a complete data ID, and pass its values in via several
            # different ways that should be equivalent.
            for dataId in split.complete:
                # Split the keys (dimension names) into two random subsets, so
                # we can pass some as kwargs below.
                keys1 = set(self.rng.sample(list(dataId.graph.dimensions.names),
                                            len(dataId.graph.dimensions)//2))
                keys2 = dataId.graph.dimensions.names - keys1
                newCompleteDataIds = [
                    DataCoordinate.standardize(dataId.full.byName(), universe=dataId.universe),
                    DataCoordinate.standardize(dataId.full.byName(), graph=dataId.graph),
                    DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                               **dataId.full.byName()),
                    DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                               graph=dataId.graph, **dataId.full.byName()),
                    DataCoordinate.standardize(**dataId.full.byName(), universe=dataId.universe),
                    DataCoordinate.standardize(graph=dataId.graph, **dataId.full.byName()),
                    DataCoordinate.standardize(
                        {k: dataId[k] for k in keys1},
                        universe=dataId.universe,
                        **{k: dataId[k] for k in keys2}
                    ),
                    DataCoordinate.standardize(
                        {k: dataId[k] for k in keys1},
                        graph=dataId.graph,
                        **{k: dataId[k] for k in keys2}
                    ),
                ]
                for newDataId in newCompleteDataIds:
                    with self.subTest(dataId=dataId, newDataId=newDataId, type=type(dataId)):
                        self.assertEqual(dataId, newDataId)
                        self.assertTrue(newDataId.hasFull())

    def testRegions(self):
        """Test that data IDs for a few known dimensions have the expected
        regions.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"visit"})
            self.assertEqual(dataId.region, dataId.records["visit"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit", "detector"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"visit_detector_region"})
            self.assertEqual(dataId.region, dataId.records["visit_detector_region"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["tract"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"tract"})
            self.assertEqual(dataId.region, dataId.records["tract"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["patch"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"patch"})
            self.assertEqual(dataId.region, dataId.records["patch"].region)

    def testTimespans(self):
        """Test that data IDs for a few known dimensions have the expected
        timespans.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.timespan)
            self.assertEqual(dataId.graph.temporal.names, {"visit"})
            self.assertEqual(dataId.timespan, dataId.records["visit"].timespan)

    def testIterableStatusFlags(self):
        """Test that DataCoordinateSet and DataCoordinateSequence compute
        their hasFull and hasRecords flags correctly from their elements.
        """
        dataIds = self.randomDataIds(n=10)
        split = self.splitByStateFlags(dataIds)
        for cls in (DataCoordinateSet, DataCoordinateSequence):
            # Expanded data IDs have both full values and records.
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasRecords())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasRecords())
            # Complete data IDs have full values but no records.
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=False).hasFull())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.complete, graph=dataIds.graph, hasRecords=True, check=True)
            # Minimal data IDs are full only when there are no implied
            # dimensions to be missing.
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=True).hasFull(),
                             not dataIds.graph.implied)
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=False).hasFull(),
                             not dataIds.graph.implied)
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.minimal, graph=dataIds.graph, hasRecords=True, check=True)
            if dataIds.graph.implied:
                with self.assertRaises(ValueError):
                    cls(split.minimal, graph=dataIds.graph, hasFull=True, check=True)

    def testSetOperations(self):
        """Test for self-consistency across DataCoordinateSet's operations.
        """
        # c is a shared subset so a and b are guaranteed to overlap.
        c = self.randomDataIds(n=10).toSet()
        a = self.randomDataIds(n=20).toSet() | c
        b = self.randomDataIds(n=20).toSet() | c
        # Make sure we don't have a particularly unlucky random seed, since
        # that would make a lot of this test uninteresting.
        self.assertNotEqual(a, b)
        self.assertGreater(len(a), 0)
        self.assertGreater(len(b), 0)
        # The rest of the tests should not depend on the random seed.
        self.assertEqual(a, a)
        self.assertNotEqual(a, a.toSequence())
        self.assertEqual(a, a.toSequence().toSet())
        self.assertEqual(b, b)
        self.assertNotEqual(b, b.toSequence())
        self.assertEqual(b, b.toSequence().toSet())
        # Operator forms must agree with the named methods and with the usual
        # set-algebra orderings.
        self.assertEqual(a & b, a.intersection(b))
        self.assertLessEqual(a & b, a)
        self.assertLessEqual(a & b, b)
        self.assertEqual(a | b, a.union(b))
        self.assertGreaterEqual(a | b, a)
        self.assertGreaterEqual(a | b, b)
        self.assertEqual(a - b, a.difference(b))
        self.assertLessEqual(a - b, a)
        self.assertLessEqual(b - a, b)
        self.assertEqual(a ^ b, a.symmetric_difference(b))
        self.assertGreaterEqual(a ^ b, (a | b) - (a & b))

627 

628 

if __name__ == "__main__":
    unittest.main()