Coverage for tests/test_dimensions.py : 11%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
22import unittest
23import copy
24from dataclasses import dataclass
25import os
26import pickle
27from random import Random
28import itertools
29from typing import Iterator, Optional
31from lsst.daf.butler import (
32 DataCoordinate,
33 DataCoordinateSequence,
34 DataCoordinateSet,
35 Dimension,
36 DimensionGraph,
37 DimensionUniverse,
38 makeDimensionElementTableSpec,
39 NamedKeyDict,
40 NamedValueSet,
41 Registry,
42 YamlRepoImportBackend,
43)
44from lsst.daf.butler.registry import RegistryConfig
# Path to the dimension-data export file (a YAML dump of a subset of the
# HSC RC2 dataset) used to populate the test registry.
DIMENSION_DATA_FILE = os.path.normpath(os.path.join(os.path.dirname(__file__),
                                                    "data", "registry", "hsc-rc2-subset.yaml"))
def loadDimensionData() -> DataCoordinateSequence:
    """Load dimension data from an export file included in the code repository.

    Returns
    -------
    dataIds : `DataCoordinateSequence`
        A sequence containing all (expanded) data IDs in the export file, for
        the ``{visit, detector, tract, patch}`` dimensions.
    """
    # Create an in-memory SQLite database and Registry just to import the YAML
    # data and retrieve it as a set of DataCoordinate objects.
    config = RegistryConfig()
    config["db"] = "sqlite://"
    registry = Registry.fromConfig(config, create=True)
    with open(DIMENSION_DATA_FILE, 'r') as stream:
        backend = YamlRepoImportBackend(stream, registry)
        backend.register()
        backend.load(datastore=None)
    dimensions = DimensionGraph(registry.dimensions, names=["visit", "detector", "tract", "patch"])
    return DataCoordinateSequence(
        dataIds=tuple(registry.queryDimensions(dimensions, expand=True)),
        graph=dimensions,
        hasFull=True,
        hasRecords=True,
    )
class DimensionTestCase(unittest.TestCase):
    """Tests for dimensions.

    All tests here rely on the content of ``config/dimensions.yaml``, either
    to test that the definitions there are read in properly or just as generic
    data for testing various operations.
    """

    def setUp(self):
        # Every test uses the default dimension universe loaded from
        # config/dimensions.yaml.
        self.universe = DimensionUniverse()

    def checkGraphInvariants(self, graph):
        """Check invariants that any `DimensionGraph` should satisfy.

        Parameters
        ----------
        graph : `DimensionGraph`
            Graph to check; must belong to ``self.universe``.
        """
        elements = list(graph.elements)
        for n, element in enumerate(elements):
            # Ordered comparisons on graphs behave like sets.
            self.assertLessEqual(element.graph, graph)
            # Ordered comparisons on elements correspond to the ordering within
            # a DimensionUniverse (topological, with deterministic
            # tiebreakers).
            for other in elements[:n]:
                self.assertLess(other, element)
                self.assertLessEqual(other, element)
            for other in elements[n + 1:]:
                self.assertGreater(other, element)
                self.assertGreaterEqual(other, element)
            if isinstance(element, Dimension):
                self.assertEqual(element.graph.required, element.required)
        # A graph should round-trip through its required dimensions.
        self.assertEqual(DimensionGraph(self.universe, graph.required), graph)
        # Required dimensions are exactly those not implied by any element of
        # the graph.
        self.assertCountEqual(graph.required,
                              [dimension for dimension in graph.dimensions
                               if not any(dimension in other.graph.implied for other in graph.elements)])
        self.assertCountEqual(graph.implied, graph.dimensions - graph.required)
        self.assertCountEqual(graph.dimensions,
                              [element for element in graph.elements
                               if isinstance(element, Dimension)])
        self.assertCountEqual(graph.dimensions, itertools.chain(graph.required, graph.implied))
        # Check primary key traversal order: each element should follow any it
        # requires, and any element that is implied by another in the graph
        # must follow at least one of those.
        seen = NamedValueSet()
        for element in graph.primaryKeyTraversalOrder:
            with self.subTest(required=graph.required, implied=graph.implied, element=element):
                seen.add(element)
                self.assertLessEqual(element.graph.required, seen)
                if element in graph.implied:
                    self.assertTrue(any(element in s.implied for s in seen))
        self.assertCountEqual(seen, graph.elements)
        # Test encoding and decoding of DimensionGraphs to bytes.
        encoded = graph.encode()
        self.assertEqual(len(encoded), self.universe.getEncodeLength())
        self.assertEqual(DimensionGraph.decode(encoded, universe=self.universe), graph)

    def testConfigRead(self):
        # The universe read from config should define exactly this set of
        # dimensions.
        self.assertEqual(self.universe.dimensions.names,
                         {"instrument", "visit", "visit_system", "exposure", "detector",
                          "physical_filter", "abstract_filter", "subfilter", "calibration_label",
                          "skymap", "tract", "patch", "htm7", "htm9"})

    def testGraphs(self):
        # Check invariants on the empty graph, the full universe, and the
        # graph of every individual element.
        self.checkGraphInvariants(self.universe.empty)
        self.checkGraphInvariants(self.universe)
        for element in self.universe.elements:
            self.checkGraphInvariants(element.graph)

    def testInstrumentDimensions(self):
        # Expanding instrument-related dimensions should pull in their
        # required/implied dependencies and join elements.
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit", "calibration_label"))
        self.assertCountEqual(graph.dimensions.names,
                              ("instrument", "exposure", "detector", "calibration_label",
                               "visit", "physical_filter", "abstract_filter", "visit_system"))
        self.assertCountEqual(graph.required.names, ("instrument", "exposure", "detector",
                                                     "calibration_label", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "abstract_filter", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))

    def testCalibrationDimensions(self):
        graph = DimensionGraph(self.universe, names=("calibration_label", "physical_filter", "detector"))
        self.assertCountEqual(graph.dimensions.names,
                              ("instrument", "detector", "calibration_label",
                               "physical_filter", "abstract_filter"))
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "calibration_label",
                                                     "physical_filter"))
        self.assertCountEqual(graph.implied.names, ("abstract_filter",))
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)

    def testObservationDimensions(self):
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit"))
        self.assertCountEqual(graph.dimensions.names, ("instrument", "detector", "visit", "exposure",
                                                       "physical_filter", "abstract_filter", "visit_system"))
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "exposure", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "abstract_filter", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))
        self.assertCountEqual(graph.spatial.names, ("visit_detector_region",))
        self.assertCountEqual(graph.temporal.names, ("exposure",))

    def testSkyMapDimensions(self):
        graph = DimensionGraph(self.universe, names=("patch",))
        self.assertCountEqual(graph.dimensions.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.required.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.implied.names, ())
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)
        self.assertCountEqual(graph.spatial.names, ("patch",))

    def testSubsetCalculation(self):
        """Test that independent spatial and temporal options are computed
        correctly.
        """
        graph = DimensionGraph(self.universe, names=("visit", "detector", "tract", "patch", "htm7",
                                                     "exposure", "calibration_label"))
        self.assertCountEqual(graph.spatial.names,
                              ("visit_detector_region", "patch", "htm7"))
        self.assertCountEqual(graph.temporal.names,
                              ("exposure", "calibration_label"))

    def testSchemaGeneration(self):
        """Test that the generated table specifications are self-consistent
        with each element's dimension dependencies and foreign keys.
        """
        tableSpecs = NamedKeyDict({})
        # Only elements with a real (non-view) table get a spec.
        for element in self.universe.elements:
            if element.hasTable and element.viewOf is None:
                tableSpecs[element] = makeDimensionElementTableSpec(element)
        for element, tableSpec in tableSpecs.items():
            for dep in element.required:
                with self.subTest(element=element.name, dep=dep.name):
                    if dep != element:
                        # Required dependencies appear as non-nullable
                        # primary-key fields matching the dependency's key.
                        self.assertIn(dep.name, tableSpec.fields)
                        self.assertEqual(tableSpec.fields[dep.name].dtype, dep.primaryKey.dtype)
                        self.assertEqual(tableSpec.fields[dep.name].length, dep.primaryKey.length)
                        self.assertEqual(tableSpec.fields[dep.name].nbytes, dep.primaryKey.nbytes)
                        self.assertFalse(tableSpec.fields[dep.name].nullable)
                        self.assertTrue(tableSpec.fields[dep.name].primaryKey)
                    else:
                        # A dimension's own key field uses its declared
                        # primary-key name rather than the dimension name.
                        self.assertIn(element.primaryKey.name, tableSpec.fields)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].dtype,
                                         dep.primaryKey.dtype)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].length,
                                         dep.primaryKey.length)
                        self.assertEqual(tableSpec.fields[element.primaryKey.name].nbytes,
                                         dep.primaryKey.nbytes)
                        self.assertFalse(tableSpec.fields[element.primaryKey.name].nullable)
                        self.assertTrue(tableSpec.fields[element.primaryKey.name].primaryKey)
            for dep in element.implied:
                with self.subTest(element=element.name, dep=dep.name):
                    # Implied dependencies are regular (non-key) fields.
                    self.assertIn(dep.name, tableSpec.fields)
                    self.assertEqual(tableSpec.fields[dep.name].dtype, dep.primaryKey.dtype)
                    self.assertFalse(tableSpec.fields[dep.name].primaryKey)
            for foreignKey in tableSpec.foreignKeys:
                # Foreign keys must target other specs in the graph and have
                # field-for-field matching types.
                self.assertIn(foreignKey.table, tableSpecs)
                self.assertIn(foreignKey.table, element.graph.dimensions.names)
                self.assertEqual(len(foreignKey.source), len(foreignKey.target))
                for source, target in zip(foreignKey.source, foreignKey.target):
                    self.assertIn(source, tableSpec.fields.names)
                    self.assertIn(target, tableSpecs[foreignKey.table].fields.names)
                    self.assertEqual(tableSpec.fields[source].dtype,
                                     tableSpecs[foreignKey.table].fields[target].dtype)
                    self.assertEqual(tableSpec.fields[source].length,
                                     tableSpecs[foreignKey.table].fields[target].length)
                    self.assertEqual(tableSpec.fields[source].nbytes,
                                     tableSpecs[foreignKey.table].fields[target].nbytes)
            # Record classes should have one slot per table field, in order.
            self.assertEqual(tuple(tableSpec.fields.names), element.RecordClass.__slots__)

    def testPickling(self):
        # Pickling and copying should always yield the exact same object within
        # a single process (cross-process is impossible to test here).
        universe1 = DimensionUniverse()
        universe2 = pickle.loads(pickle.dumps(universe1))
        universe3 = copy.copy(universe1)
        universe4 = copy.deepcopy(universe1)
        self.assertIs(universe1, universe2)
        self.assertIs(universe1, universe3)
        self.assertIs(universe1, universe4)
        for element1 in universe1.elements:
            element2 = pickle.loads(pickle.dumps(element1))
            self.assertIs(element1, element2)
            graph1 = element1.graph
            graph2 = pickle.loads(pickle.dumps(graph1))
            self.assertIs(graph1, graph2)
@dataclass
class SplitByStateFlags:
    """A struct that separates data IDs with different states but the same
    values.
    """

    minimal: Optional[DataCoordinateSequence] = None
    """Data IDs that only contain values for required dimensions.

    `DataCoordinateSequence.hasFull()` will return `True` for this if and only
    if ``minimal.graph.implied`` has no elements.
    `DataCoordinate.hasRecords()` will always return `False`.
    """

    complete: Optional[DataCoordinateSequence] = None
    """Data IDs that contain values for all dimensions.

    `DataCoordinateSequence.hasFull()` will always `True` and
    `DataCoordinate.hasRecords()` will always return `True` for this attribute.
    """

    expanded: Optional[DataCoordinateSequence] = None
    """Data IDs that contain values for all dimensions as well as records.

    `DataCoordinateSequence.hasFull()` and `DataCoordinate.hasRecords()` will
    always return `True` for this attribute.
    """

    def chain(self, n: Optional[int] = None) -> Iterator:
        """Iterate over the data IDs of different types.

        Parameters
        ----------
        n : `int`, optional
            If provided (`None` is default), iterate over only the ``nth``
            data ID in each attribute.

        Yields
        ------
        dataId : `DataCoordinate`
            A data ID from one of the attributes in this struct.
        """
        # Select either everything or just the nth entry of each attribute.
        selection = slice(None) if n is None else slice(n, n + 1)
        # Attributes are visited in declaration order, skipping unset ones.
        for sequence in (self.minimal, self.complete, self.expanded):
            if sequence is not None:
                yield from sequence[selection]
class DataCoordinateTestCase(unittest.TestCase):
    """Tests for `DataCoordinate` and its container classes, driven by data
    IDs loaded from an export file included in the code repository.
    """

    # Fixed seed so the "random" selections below are reproducible.
    RANDOM_SEED = 10

    @classmethod
    def setUpClass(cls):
        # Loading the export file creates an in-memory SQLite registry, which
        # is slow enough that we only do it once for the whole class.
        cls.allDataIds = loadDimensionData()

    def setUp(self):
        # Re-seed for every test so results do not depend on test order.
        self.rng = Random(self.RANDOM_SEED)

    def randomDataIds(self, n: int, dataIds: Optional[DataCoordinateSequence] = None):
        """Select random data IDs from those loaded from test data.

        Parameters
        ----------
        n : `int`
            Number of data IDs to select.
        dataIds : `DataCoordinateSequence`, optional
            Data IDs to select from.  Defaults to ``self.allDataIds``.

        Returns
        -------
        selected : `DataCoordinateSequence`
            ``n`` data IDs randomly selected from ``dataIds`` without
            replacement.
        """
        if dataIds is None:
            dataIds = self.allDataIds
        # The inputs are already-validated data IDs, so skip container checks.
        return DataCoordinateSequence(self.rng.sample(dataIds, n),
                                      graph=dataIds.graph,
                                      hasFull=dataIds.hasFull(),
                                      hasRecords=dataIds.hasRecords(),
                                      check=False)

    def randomDimensionSubset(self, n: int = 3, graph: Optional[DimensionGraph] = None) -> DimensionGraph:
        """Generate a random `DimensionGraph` that has a subset of the
        dimensions in a given one.

        Parameters
        ----------
        n : `int`
            Number of dimensions to select, before automatic expansion by
            `DimensionGraph`.
        graph : `DimensionGraph`, optional
            Dimensions to select from.  Defaults to ``self.allDataIds.graph``.

        Returns
        -------
        selected : `DimensionGraph`
            ``n`` or more dimensions randomly selected from ``graph`` without
            replacement.
        """
        if graph is None:
            graph = self.allDataIds.graph
        # Clamp the sample size with min() so we never ask random.sample for
        # more dimensions than exist (max() here would always select every
        # dimension, or raise ValueError when n exceeds the count).
        return DimensionGraph(
            graph.universe,
            names=self.rng.sample(list(graph.dimensions.names), min(n, len(graph.dimensions)))
        )

    def splitByStateFlags(self, dataIds: Optional[DataCoordinateSequence] = None, *,
                          expanded: bool = True,
                          complete: bool = True,
                          minimal: bool = True) -> SplitByStateFlags:
        """Given a sequence of data IDs, generate new equivalent sequences
        containing less information.

        Parameters
        ----------
        dataIds : `DataCoordinateSequence`, optional.
            Data IDs to start from.  Defaults to ``self.allDataIds``.
            ``dataIds.hasRecords()`` and ``dataIds.hasFull()`` must both return
            `True`.
        expanded : `bool`, optional
            If `True` (default) include the original data IDs that contain all
            information in the result.
        complete : `bool`, optional
            If `True` (default) include data IDs for which ``hasFull()``
            returns `True` but ``hasRecords()`` does not.
        minimal : `bool`, optional
            If `True` (default) include data IDs that only contain values for
            required dimensions, for which ``hasFull()`` may not return `True`.

        Returns
        -------
        split : `SplitByStateFlags`
            A dataclass holding the indicated data IDs in attributes that
            correspond to the boolean keyword arguments.
        """
        if dataIds is None:
            dataIds = self.allDataIds
        assert dataIds.hasFull() and dataIds.hasRecords()
        result = SplitByStateFlags(expanded=dataIds)
        if complete:
            # Rebuild from full values only: drops records but keeps hasFull.
            result.complete = DataCoordinateSequence(
                [DataCoordinate.standardize(e.full.byName(), graph=dataIds.graph) for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertTrue(result.complete.hasFull())
            self.assertFalse(result.complete.hasRecords())
        if minimal:
            # Rebuild from required values only: may lose hasFull if the
            # graph has implied dimensions.
            result.minimal = DataCoordinateSequence(
                [DataCoordinate.standardize(e.byName(), graph=dataIds.graph) for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertEqual(result.minimal.hasFull(), not dataIds.graph.implied)
            self.assertFalse(result.minimal.hasRecords())
        if not expanded:
            result.expanded = None
        return result

    def testMappingInterface(self):
        """Test that the mapping interface in `DataCoordinate` and (when
        applicable) its ``full`` property are self-consistent and consistent
        with the ``graph`` property.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for dataId in split.chain():
                with self.subTest(dataId=dataId):
                    # Values can be looked up by Dimension object or by name.
                    self.assertEqual(list(dataId.values()), [dataId[d] for d in dataId.keys()])
                    self.assertEqual(list(dataId.values()), [dataId[d.name] for d in dataId.keys()])
                    self.assertEqual(dataId.keys(), dataId.graph.required)
            for dataId in itertools.chain(split.complete, split.expanded):
                with self.subTest(dataId=dataId):
                    self.assertTrue(dataId.hasFull())
                    self.assertEqual(dataId.graph.dimensions, dataId.full.keys())
                    self.assertEqual(list(dataId.full.values()), [dataId[k] for k in dataId.graph.dimensions])

    def testEquality(self):
        """Test that different `DataCoordinate` instances with different state
        flags can be compared with each other and other mappings.
        """
        dataIds = self.randomDataIds(n=2)
        split = self.splitByStateFlags(dataIds)
        # Iterate over all combinations of different states of DataCoordinate,
        # with the same underlying data ID values.
        for a0, b0 in itertools.combinations(split.chain(0), 2):
            self.assertEqual(a0, b0)
            self.assertEqual(a0, b0.byName())
            self.assertEqual(a0.byName(), b0)
        # Same thing, for a different data ID value.
        for a1, b1 in itertools.combinations(split.chain(1), 2):
            self.assertEqual(a1, b1)
            self.assertEqual(a1, b1.byName())
            self.assertEqual(a1.byName(), b1)
        # Iterate over all combinations of different states of DataCoordinate,
        # with different underlying data ID values.  Compare each pair in both
        # directions rather than relying on loop variables leaked from the
        # loops above (which only ever held the final iteration's values).
        for a, b in itertools.product(split.chain(0), split.chain(1)):
            self.assertNotEqual(a, b)
            self.assertNotEqual(b, a)
            self.assertNotEqual(a, b.byName())
            self.assertNotEqual(a.byName(), b)
            self.assertNotEqual(b, a.byName())
            self.assertNotEqual(b.byName(), a)

    def testStandardize(self):
        """Test constructing a DataCoordinate from many different kinds of
        input via `DataCoordinate.standardize` and `DataCoordinate.subset`.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for m, dataId in enumerate(split.chain()):
                # Passing in any kind of DataCoordinate alone just returns
                # that object.
                self.assertIs(dataId, DataCoordinate.standardize(dataId))
                # Same if we also explicitly pass the dimensions we want.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph))
                # Same if we pass the dimensions and some irrelevant
                # kwargs.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph, htm7=12))
                # Test constructing a new data ID from this one with a
                # subset of the dimensions.
                # This is not possible for some combinations of
                # dimensions if hasFull is False (see
                # `DataCoordinate.subset` docs).
                newDimensions = self.randomDimensionSubset(n=1, graph=dataId.graph)
                if dataId.hasFull() or dataId.graph.required.issuperset(newDimensions.required):
                    newDataIds = [
                        dataId.subset(newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions, htm7=12),
                    ]
                    for newDataId in newDataIds:
                        with self.subTest(newDataId=newDataId, type=type(dataId)):
                            commonKeys = dataId.keys() & newDataId.keys()
                            self.assertTrue(commonKeys)
                            self.assertEqual(
                                [newDataId[k] for k in commonKeys],
                                [dataId[k] for k in commonKeys],
                            )
                            # This should never "downgrade" from
                            # Complete to Minimal or Expanded to Complete.
                            if dataId.hasRecords():
                                self.assertTrue(newDataId.hasRecords())
                            if dataId.hasFull():
                                self.assertTrue(newDataId.hasFull())
            # Start from a complete data ID, and pass its values in via several
            # different ways that should be equivalent.
            for dataId in split.complete:
                # Split the keys (dimension names) into two random subsets, so
                # we can pass some as kwargs below.
                keys1 = set(self.rng.sample(list(dataId.graph.dimensions.names),
                                            len(dataId.graph.dimensions)//2))
                keys2 = dataId.graph.dimensions.names - keys1
                newCompleteDataIds = [
                    DataCoordinate.standardize(dataId.full.byName(), universe=dataId.universe),
                    DataCoordinate.standardize(dataId.full.byName(), graph=dataId.graph),
                    DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                               **dataId.full.byName()),
                    DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                               graph=dataId.graph, **dataId.full.byName()),
                    DataCoordinate.standardize(**dataId.full.byName(), universe=dataId.universe),
                    DataCoordinate.standardize(graph=dataId.graph, **dataId.full.byName()),
                    DataCoordinate.standardize(
                        {k: dataId[k] for k in keys1},
                        universe=dataId.universe,
                        **{k: dataId[k] for k in keys2}
                    ),
                    DataCoordinate.standardize(
                        {k: dataId[k] for k in keys1},
                        graph=dataId.graph,
                        **{k: dataId[k] for k in keys2}
                    ),
                ]
                for newDataId in newCompleteDataIds:
                    with self.subTest(dataId=dataId, newDataId=newDataId, type=type(dataId)):
                        self.assertEqual(dataId, newDataId)
                        self.assertTrue(newDataId.hasFull())

    def testRegions(self):
        """Test that data IDs for a few known dimensions have the expected
        regions.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"visit"})
            self.assertEqual(dataId.region, dataId.records["visit"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit", "detector"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"visit_detector_region"})
            self.assertEqual(dataId.region, dataId.records["visit_detector_region"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["tract"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"tract"})
            self.assertEqual(dataId.region, dataId.records["tract"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["patch"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"patch"})
            self.assertEqual(dataId.region, dataId.records["patch"].region)

    def testTimespans(self):
        """Test that data IDs for a few known dimensions have the expected
        timespans.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.timespan)
            self.assertEqual(dataId.graph.temporal.names, {"visit"})
            self.assertEqual(dataId.timespan, dataId.records["visit"].timespan)

    def testIterableStatusFlags(self):
        """Test that DataCoordinateSet and DataCoordinateSequence compute
        their hasFull and hasRecords flags correctly from their elements.
        """
        dataIds = self.randomDataIds(n=10)
        split = self.splitByStateFlags(dataIds)
        for cls in (DataCoordinateSet, DataCoordinateSequence):
            # Expanded data IDs have both full values and records, whether
            # the container checks its elements or not.
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasRecords())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasRecords())
            # Complete data IDs have full values but no records.
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=False).hasFull())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.complete, graph=dataIds.graph, hasRecords=True, check=True)
            # Minimal data IDs only have full values when nothing is implied.
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=True).hasFull(),
                             not dataIds.graph.implied)
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=False).hasFull(),
                             not dataIds.graph.implied)
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.minimal, graph=dataIds.graph, hasRecords=True, check=True)
            if dataIds.graph.implied:
                with self.assertRaises(ValueError):
                    cls(split.minimal, graph=dataIds.graph, hasFull=True, check=True)

    def testSetOperations(self):
        """Test for self-consistency across DataCoordinateSet's operations.
        """
        c = self.randomDataIds(n=10).toSet()
        a = self.randomDataIds(n=20).toSet() | c
        b = self.randomDataIds(n=20).toSet() | c
        # Make sure we don't have a particularly unlucky random seed, since
        # that would make a lot of this test uninteresting.
        self.assertNotEqual(a, b)
        self.assertGreater(len(a), 0)
        self.assertGreater(len(b), 0)
        # The rest of the tests should not depend on the random seed.
        self.assertEqual(a, a)
        self.assertNotEqual(a, a.toSequence())
        self.assertEqual(a, a.toSequence().toSet())
        self.assertEqual(b, b)
        self.assertNotEqual(b, b.toSequence())
        self.assertEqual(b, b.toSequence().toSet())
        # Operator forms should agree with the named methods and with the
        # usual subset/superset relationships.
        self.assertEqual(a & b, a.intersection(b))
        self.assertLessEqual(a & b, a)
        self.assertLessEqual(a & b, b)
        self.assertEqual(a | b, a.union(b))
        self.assertGreaterEqual(a | b, a)
        self.assertGreaterEqual(a | b, b)
        self.assertEqual(a - b, a.difference(b))
        self.assertLessEqual(a - b, a)
        self.assertLessEqual(b - a, b)
        self.assertEqual(a ^ b, a.symmetric_difference(b))
        self.assertGreaterEqual(a ^ b, (a | b) - (a & b))
# Standard unittest entry point so the file can be run directly as a script.
if __name__ == "__main__":
    unittest.main()