Coverage for tests/test_dimensions.py: 10%
Shortcuts on this page:
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22import unittest
23import copy
24from dataclasses import dataclass
25import os
26import pickle
27from random import Random
28import itertools
29from typing import Iterator, Optional
31from lsst.daf.butler import (
32 DataCoordinate,
33 DataCoordinateSequence,
34 DataCoordinateSet,
35 Dimension,
36 DimensionConfig,
37 DimensionGraph,
38 DimensionUniverse,
39 NamedKeyDict,
40 NamedValueSet,
41 Registry,
42 SpatialRegionDatabaseRepresentation,
43 TimespanDatabaseRepresentation,
44 YamlRepoImportBackend,
45)
46from lsst.daf.butler.registry import RegistryConfig
# Absolute, normalized path to the dimension-data export file shipped with
# the test suite (resolved relative to this test module).
DIMENSION_DATA_FILE = os.path.normpath(
    os.path.join(os.path.dirname(__file__), "data", "registry", "hsc-rc2-subset.yaml")
)
def loadDimensionData() -> DataCoordinateSequence:
    """Load dimension data from an export file included in the code repository.

    Returns
    -------
    dataIds : `DataCoordinateSequence`
        A sequence containing all data IDs in the export file, expanded to
        include dimension records.
    """
    # Create an in-memory SQLite database and Registry just to import the
    # YAML data and retrieve it as a set of DataCoordinate objects.
    registryConfig = RegistryConfig()
    registryConfig["db"] = "sqlite://"
    registry = Registry.createFromConfig(registryConfig)
    with open(DIMENSION_DATA_FILE, 'r') as stream:
        backend = YamlRepoImportBackend(stream, registry)
        backend.register()
        backend.load(datastore=None)
    graph = DimensionGraph(registry.dimensions, names=["visit", "detector", "tract", "patch"])
    return registry.queryDataIds(graph).expanded().toSequence()
class DimensionTestCase(unittest.TestCase):
    """Tests for dimensions.

    All tests here rely on the content of ``config/dimensions.yaml``,
    either to verify that the definitions there are read in properly or
    simply as generic data for exercising various operations.
    """

    def setUp(self):
        self.universe = DimensionUniverse()

    def checkGraphInvariants(self, graph):
        """Assert the invariants every `DimensionGraph` must satisfy."""
        allElements = list(graph.elements)
        for index, current in enumerate(allElements):
            # Ordered comparisons on graphs behave like set comparisons.
            self.assertLessEqual(current.graph, graph)
            # Ordered comparisons on elements correspond to the ordering
            # within a DimensionUniverse (topological, with deterministic
            # tiebreakers).
            for earlier in allElements[:index]:
                self.assertLess(earlier, current)
                self.assertLessEqual(earlier, current)
            for later in allElements[index + 1:]:
                self.assertGreater(later, current)
                self.assertGreaterEqual(later, current)
            if isinstance(current, Dimension):
                self.assertEqual(current.graph.required, current.required)
        self.assertEqual(DimensionGraph(self.universe, graph.required), graph)
        self.assertCountEqual(
            graph.required,
            [dim for dim in graph.dimensions
             if not any(dim in other.graph.implied for other in graph.elements)],
        )
        self.assertCountEqual(graph.implied, graph.dimensions - graph.required)
        self.assertCountEqual(
            graph.dimensions,
            [e for e in graph.elements if isinstance(e, Dimension)],
        )
        self.assertCountEqual(graph.dimensions, itertools.chain(graph.required, graph.implied))
        # Check the primary key traversal order: each element must follow
        # everything it requires, and an element that is implied by any other
        # element in the graph must follow at least one of those.
        visited = NamedValueSet()
        for current in graph.primaryKeyTraversalOrder:
            with self.subTest(required=graph.required, implied=graph.implied, element=current):
                visited.add(current)
                self.assertLessEqual(current.graph.required, visited)
                if current in graph.implied:
                    self.assertTrue(any(current in s.implied for s in visited))
        self.assertCountEqual(visited, graph.elements)

    def testConfigPresent(self):
        config = self.universe.dimensionConfig
        self.assertIsInstance(config, DimensionConfig)

    def testConfigRead(self):
        expected = {"instrument", "visit", "visit_system", "exposure", "detector",
                    "physical_filter", "band", "subfilter",
                    "skymap", "tract", "patch"}
        expected |= {f"htm{level}" for level in range(25)}
        self.assertEqual(self.universe.getStaticDimensions().names, expected)

    def testGraphs(self):
        self.checkGraphInvariants(self.universe.empty)
        for element in self.universe.getStaticElements():
            self.checkGraphInvariants(element.graph)

    def testInstrumentDimensions(self):
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit"))
        self.assertCountEqual(
            graph.dimensions.names,
            ("instrument", "exposure", "detector", "visit",
             "physical_filter", "band", "visit_system"),
        )
        self.assertCountEqual(graph.required.names, ("instrument", "exposure", "detector", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))
        self.assertCountEqual(graph.governors.names, {"instrument"})

    def testCalibrationDimensions(self):
        graph = DimensionGraph(self.universe, names=("physical_filter", "detector"))
        self.assertCountEqual(graph.dimensions.names,
                              ("instrument", "detector", "physical_filter", "band"))
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "physical_filter"))
        self.assertCountEqual(graph.implied.names, ("band",))
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)
        self.assertCountEqual(graph.governors.names, {"instrument"})

    def testObservationDimensions(self):
        graph = DimensionGraph(self.universe, names=("exposure", "detector", "visit"))
        self.assertCountEqual(
            graph.dimensions.names,
            ("instrument", "detector", "visit", "exposure",
             "physical_filter", "band", "visit_system"),
        )
        self.assertCountEqual(graph.required.names, ("instrument", "detector", "exposure", "visit"))
        self.assertCountEqual(graph.implied.names, ("physical_filter", "band", "visit_system"))
        self.assertCountEqual(graph.elements.names - graph.dimensions.names,
                              ("visit_detector_region", "visit_definition"))
        self.assertCountEqual(graph.spatial.names, ("observation_regions",))
        self.assertCountEqual(graph.temporal.names, ("observation_timespans",))
        self.assertCountEqual(graph.governors.names, {"instrument"})
        self.assertEqual(graph.spatial.names, {"observation_regions"})
        self.assertEqual(graph.temporal.names, {"observation_timespans"})
        # Both families are governed by the instrument dimension.
        self.assertEqual(next(iter(graph.spatial)).governor, self.universe["instrument"])
        self.assertEqual(next(iter(graph.temporal)).governor, self.universe["instrument"])

    def testSkyMapDimensions(self):
        graph = DimensionGraph(self.universe, names=("patch",))
        self.assertCountEqual(graph.dimensions.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.required.names, ("skymap", "tract", "patch"))
        self.assertCountEqual(graph.implied.names, ())
        self.assertCountEqual(graph.elements.names, graph.dimensions.names)
        self.assertCountEqual(graph.spatial.names, ("skymap_regions",))
        self.assertCountEqual(graph.governors.names, {"skymap"})
        self.assertEqual(graph.spatial.names, {"skymap_regions"})
        self.assertEqual(next(iter(graph.spatial)).governor, self.universe["skymap"])

    def testSubsetCalculation(self):
        """Test that independent spatial and temporal options are computed
        correctly.
        """
        graph = DimensionGraph(
            self.universe,
            names=("visit", "detector", "tract", "patch", "htm7", "exposure"),
        )
        self.assertCountEqual(graph.spatial.names,
                              ("observation_regions", "skymap_regions", "htm"))
        self.assertCountEqual(graph.temporal.names,
                              ("observation_timespans",))

    def testSchemaGeneration(self):
        # Build table specifications for every static element that is backed
        # by a real table (views are skipped).
        specs = NamedKeyDict({})
        for elem in self.universe.getStaticElements():
            if elem.hasTable and elem.viewOf is None:
                specs[elem] = elem.RecordClass.fields.makeTableSpec(
                    RegionReprClass=SpatialRegionDatabaseRepresentation,
                    TimespanReprClass=TimespanDatabaseRepresentation.Compound,
                )
        for elem, spec in specs.items():
            # Required dependencies must appear as non-nullable primary key
            # fields whose type matches the dependency's own primary key.
            for dep in elem.required:
                with self.subTest(element=elem.name, dep=dep.name):
                    if dep != elem:
                        self.assertIn(dep.name, spec.fields)
                        self.assertEqual(spec.fields[dep.name].dtype, dep.primaryKey.dtype)
                        self.assertEqual(spec.fields[dep.name].length, dep.primaryKey.length)
                        self.assertEqual(spec.fields[dep.name].nbytes, dep.primaryKey.nbytes)
                        self.assertFalse(spec.fields[dep.name].nullable)
                        self.assertTrue(spec.fields[dep.name].primaryKey)
                    else:
                        pkName = elem.primaryKey.name
                        self.assertIn(pkName, spec.fields)
                        self.assertEqual(spec.fields[pkName].dtype, dep.primaryKey.dtype)
                        self.assertEqual(spec.fields[pkName].length, dep.primaryKey.length)
                        self.assertEqual(spec.fields[pkName].nbytes, dep.primaryKey.nbytes)
                        self.assertFalse(spec.fields[pkName].nullable)
                        self.assertTrue(spec.fields[pkName].primaryKey)
            # Implied dependencies are present but not part of the key.
            for dep in elem.implied:
                with self.subTest(element=elem.name, dep=dep.name):
                    self.assertIn(dep.name, spec.fields)
                    self.assertEqual(spec.fields[dep.name].dtype, dep.primaryKey.dtype)
                    self.assertFalse(spec.fields[dep.name].primaryKey)
            # Foreign keys must point at other generated tables and match
            # field-for-field in type and size.
            for fk in spec.foreignKeys:
                self.assertIn(fk.table, specs)
                self.assertIn(fk.table, elem.graph.dimensions.names)
                self.assertEqual(len(fk.source), len(fk.target))
                for src, tgt in zip(fk.source, fk.target):
                    self.assertIn(src, spec.fields.names)
                    self.assertIn(tgt, specs[fk.table].fields.names)
                    self.assertEqual(spec.fields[src].dtype, specs[fk.table].fields[tgt].dtype)
                    self.assertEqual(spec.fields[src].length, specs[fk.table].fields[tgt].length)
                    self.assertEqual(spec.fields[src].nbytes, specs[fk.table].fields[tgt].nbytes)

    def testPickling(self):
        # Pickling and copying should always yield the exact same object
        # within a single process (cross-process is impossible to test here).
        original = DimensionUniverse()
        for clone in (pickle.loads(pickle.dumps(original)),
                      copy.copy(original),
                      copy.deepcopy(original)):
            self.assertIs(original, clone)
        for element in original.getStaticElements():
            self.assertIs(element, pickle.loads(pickle.dumps(element)))
            graph = element.graph
            self.assertIs(graph, pickle.loads(pickle.dumps(graph)))
@dataclass
class SplitByStateFlags:
    """A struct that separates data IDs with different states but the same
    values.
    """

    minimal: Optional[DataCoordinateSequence] = None
    """Data IDs that only contain values for required dimensions.

    `DataCoordinateSequence.hasFull()` will return `True` for this attribute
    if and only if ``minimal.graph.implied`` has no elements.
    `DataCoordinate.hasRecords()` will always return `False`.
    """

    complete: Optional[DataCoordinateSequence] = None
    """Data IDs that contain values for all dimensions.

    `DataCoordinateSequence.hasFull()` will always return `True` and
    `DataCoordinate.hasRecords()` will always return `False` for this
    attribute.
    """

    expanded: Optional[DataCoordinateSequence] = None
    """Data IDs that contain values for all dimensions as well as records.

    `DataCoordinateSequence.hasFull()` and `DataCoordinate.hasRecords()` will
    always return `True` for this attribute.
    """

    def chain(self, n: Optional[int] = None) -> Iterator:
        """Iterate over the data IDs in each populated attribute, in turn.

        Parameters
        ----------
        n : `int`, optional
            If provided (`None` is default), iterate over only the ``nth``
            data ID in each attribute.

        Yields
        ------
        dataId : `DataCoordinate`
            A data ID from one of the attributes in this struct.
        """
        # A one-element slice (rather than direct indexing) lets the same
        # code path handle both the "all" and the "nth only" cases.
        selector = slice(None) if n is None else slice(n, n + 1)
        for sequence in (self.minimal, self.complete, self.expanded):
            if sequence is not None:
                yield from sequence[selector]
class DataCoordinateTestCase(unittest.TestCase):
    """Tests for `DataCoordinate` and its container classes, driven by data
    IDs loaded from an export file included in the test data.
    """

    # Fixed seed so every test run draws the same "random" data IDs.
    RANDOM_SEED = 10

    @classmethod
    def setUpClass(cls):
        cls.allDataIds = loadDimensionData()

    def setUp(self):
        # Re-seed for each test so tests are deterministic and independent.
        self.rng = Random(self.RANDOM_SEED)

    def randomDataIds(self, n: int, dataIds: Optional[DataCoordinateSequence] = None):
        """Select random data IDs from those loaded from test data.

        Parameters
        ----------
        n : `int`
            Number of data IDs to select.
        dataIds : `DataCoordinateSequence`, optional
            Data IDs to select from.  Defaults to ``self.allDataIds``.

        Returns
        -------
        selected : `DataCoordinateSequence`
            ``n`` data IDs randomly selected from ``dataIds`` without
            replacement (`random.Random.sample` never repeats an element).
        """
        if dataIds is None:
            dataIds = self.allDataIds
        return DataCoordinateSequence(self.rng.sample(dataIds, n),
                                      graph=dataIds.graph,
                                      hasFull=dataIds.hasFull(),
                                      hasRecords=dataIds.hasRecords(),
                                      check=False)

    def randomDimensionSubset(self, n: int = 3, graph: Optional[DimensionGraph] = None) -> DimensionGraph:
        """Generate a random `DimensionGraph` that has a subset of the
        dimensions in a given one.

        Parameters
        ----------
        n : `int`
            Number of dimensions to select, before automatic expansion by
            `DimensionGraph`.
        graph : `DimensionGraph`, optional
            Dimensions to select from.  Defaults to ``self.allDataIds.graph``.

        Returns
        -------
        selected : `DimensionGraph`
            ``n`` or more dimensions randomly selected from ``graph``
            (possibly more than ``n`` because `DimensionGraph` automatically
            expands to include required dependencies).
        """
        if graph is None:
            graph = self.allDataIds.graph
        # Clamp the sample size to the population size.  Using ``max`` here
        # (as this test once did) either samples *every* dimension — so the
        # result is never a proper subset — or raises ValueError from
        # Random.sample whenever ``n`` exceeds the number of dimensions.
        return DimensionGraph(
            graph.universe,
            names=self.rng.sample(list(graph.dimensions.names), min(n, len(graph.dimensions)))
        )

    def splitByStateFlags(self, dataIds: Optional[DataCoordinateSequence] = None, *,
                          expanded: bool = True,
                          complete: bool = True,
                          minimal: bool = True) -> SplitByStateFlags:
        """Given a sequence of data IDs, generate new equivalent sequences
        containing less information.

        Parameters
        ----------
        dataIds : `DataCoordinateSequence`, optional.
            Data IDs to start from.  Defaults to ``self.allDataIds``.
            ``dataIds.hasRecords()`` and ``dataIds.hasFull()`` must both
            return `True`.
        expanded : `bool`, optional
            If `True` (default) include the original data IDs that contain
            all information in the result.
        complete : `bool`, optional
            If `True` (default) include data IDs for which ``hasFull()``
            returns `True` but ``hasRecords()`` does not.
        minimal : `bool`, optional
            If `True` (default) include data IDs that only contain values for
            required dimensions, for which ``hasFull()`` may not return
            `True`.

        Returns
        -------
        split : `SplitByStateFlags`
            A dataclass holding the indicated data IDs in attributes that
            correspond to the boolean keyword arguments.
        """
        if dataIds is None:
            dataIds = self.allDataIds
        assert dataIds.hasFull() and dataIds.hasRecords()
        result = SplitByStateFlags(expanded=dataIds)
        if complete:
            # Re-standardize from the full key-value mapping to drop records.
            result.complete = DataCoordinateSequence(
                [DataCoordinate.standardize(e.full.byName(), graph=dataIds.graph)
                 for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertTrue(result.complete.hasFull())
            self.assertFalse(result.complete.hasRecords())
        if minimal:
            # Re-standardize from only the required key-value pairs.
            result.minimal = DataCoordinateSequence(
                [DataCoordinate.standardize(e.byName(), graph=dataIds.graph)
                 for e in result.expanded],
                graph=dataIds.graph
            )
            self.assertEqual(result.minimal.hasFull(), not dataIds.graph.implied)
            self.assertFalse(result.minimal.hasRecords())
        if not expanded:
            result.expanded = None
        return result

    def testMappingInterface(self):
        """Test that the mapping interface in `DataCoordinate` and (when
        applicable) its ``full`` property are self-consistent and consistent
        with the ``graph`` property.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for dataId in split.chain():
                with self.subTest(dataId=dataId):
                    self.assertEqual(list(dataId.values()), [dataId[d] for d in dataId.keys()])
                    self.assertEqual(list(dataId.values()),
                                     [dataId[d.name] for d in dataId.keys()])
                    self.assertEqual(dataId.keys(), dataId.graph.required)
            for dataId in itertools.chain(split.complete, split.expanded):
                with self.subTest(dataId=dataId):
                    self.assertTrue(dataId.hasFull())
                    self.assertEqual(dataId.graph.dimensions, dataId.full.keys())
                    self.assertEqual(list(dataId.full.values()),
                                     [dataId[k] for k in dataId.graph.dimensions])

    def testEquality(self):
        """Test that different `DataCoordinate` instances with different state
        flags can be compared with each other and other mappings.
        """
        dataIds = self.randomDataIds(n=2)
        split = self.splitByStateFlags(dataIds)
        # Iterate over all combinations of different states of DataCoordinate,
        # with the same underlying data ID values.
        for a0, b0 in itertools.combinations(split.chain(0), 2):
            self.assertEqual(a0, b0)
            self.assertEqual(a0, b0.byName())
            self.assertEqual(a0.byName(), b0)
        # Same thing, for a different data ID value.
        for a1, b1 in itertools.combinations(split.chain(1), 2):
            self.assertEqual(a1, b1)
            self.assertEqual(a1, b1.byName())
            self.assertEqual(a1.byName(), b1)
        # Iterate over all combinations of different states of DataCoordinate,
        # with different underlying data ID values.  Note: the original loop
        # body asserted on ``a1``/``b0``, stale variables leaked from the two
        # loops above, instead of the current loop variables.
        for a0, b1 in itertools.product(split.chain(0), split.chain(1)):
            self.assertNotEqual(a0, b1)
            self.assertNotEqual(b1, a0)
            self.assertNotEqual(a0, b1.byName())
            self.assertNotEqual(a0.byName(), b1)
            self.assertNotEqual(b1, a0.byName())
            self.assertNotEqual(b1.byName(), a0)

    def testStandardize(self):
        """Test constructing a DataCoordinate from many different kinds of
        input via `DataCoordinate.standardize` and `DataCoordinate.subset`.
        """
        for n in range(5):
            dimensions = self.randomDimensionSubset()
            dataIds = self.randomDataIds(n=1).subset(dimensions)
            split = self.splitByStateFlags(dataIds)
            for m, dataId in enumerate(split.chain()):
                # Passing in any kind of DataCoordinate alone just returns
                # that object.
                self.assertIs(dataId, DataCoordinate.standardize(dataId))
                # Same if we also explicitly pass the dimensions we want.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph))
                # Same if we pass the dimensions and some irrelevant
                # kwargs.
                self.assertIs(dataId, DataCoordinate.standardize(dataId, graph=dataId.graph,
                                                                 htm7=12))
                # Test constructing a new data ID from this one with a
                # subset of the dimensions.
                # This is not possible for some combinations of
                # dimensions if hasFull is False (see
                # `DataCoordinate.subset` docs).
                newDimensions = self.randomDimensionSubset(n=1, graph=dataId.graph)
                if dataId.hasFull() or dataId.graph.required.issuperset(newDimensions.required):
                    newDataIds = [
                        dataId.subset(newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions),
                        DataCoordinate.standardize(dataId, graph=newDimensions, htm7=12),
                    ]
                    for newDataId in newDataIds:
                        with self.subTest(newDataId=newDataId, type=type(dataId)):
                            commonKeys = dataId.keys() & newDataId.keys()
                            self.assertTrue(commonKeys)
                            self.assertEqual(
                                [newDataId[k] for k in commonKeys],
                                [dataId[k] for k in commonKeys],
                            )
                            # This should never "downgrade" from
                            # Complete to Minimal or Expanded to Complete.
                            if dataId.hasRecords():
                                self.assertTrue(newDataId.hasRecords())
                            if dataId.hasFull():
                                self.assertTrue(newDataId.hasFull())
        # Start from a complete data ID, and pass its values in via several
        # different ways that should be equivalent.
        for dataId in split.complete:
            # Split the keys (dimension names) into two random subsets, so
            # we can pass some as kwargs below.
            keys1 = set(self.rng.sample(list(dataId.graph.dimensions.names),
                                        len(dataId.graph.dimensions)//2))
            keys2 = dataId.graph.dimensions.names - keys1
            newCompleteDataIds = [
                DataCoordinate.standardize(dataId.full.byName(), universe=dataId.universe),
                DataCoordinate.standardize(dataId.full.byName(), graph=dataId.graph),
                DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                           **dataId.full.byName()),
                DataCoordinate.standardize(DataCoordinate.makeEmpty(dataId.graph.universe),
                                           graph=dataId.graph, **dataId.full.byName()),
                DataCoordinate.standardize(**dataId.full.byName(), universe=dataId.universe),
                DataCoordinate.standardize(graph=dataId.graph, **dataId.full.byName()),
                DataCoordinate.standardize(
                    {k: dataId[k] for k in keys1},
                    universe=dataId.universe,
                    **{k: dataId[k] for k in keys2}
                ),
                DataCoordinate.standardize(
                    {k: dataId[k] for k in keys1},
                    graph=dataId.graph,
                    **{k: dataId[k] for k in keys2}
                ),
            ]
            for newDataId in newCompleteDataIds:
                with self.subTest(dataId=dataId, newDataId=newDataId, type=type(dataId)):
                    self.assertEqual(dataId, newDataId)
                    self.assertTrue(newDataId.hasFull())

    def testUnion(self):
        """Test `DataCoordinate.union`.
        """
        # Make test graphs to combine; mostly random, but with a few explicit
        # cases to make sure certain edge cases are covered.
        graphs = [self.randomDimensionSubset(n=2) for i in range(2)]
        graphs.append(self.allDataIds.universe["visit"].graph)
        graphs.append(self.allDataIds.universe["detector"].graph)
        graphs.append(self.allDataIds.universe["physical_filter"].graph)
        graphs.append(self.allDataIds.universe["band"].graph)
        # Iterate over all combinations, including the same graph with itself.
        for graph1, graph2 in itertools.product(graphs, repeat=2):
            parentDataIds = self.randomDataIds(n=1)
            split1 = self.splitByStateFlags(parentDataIds.subset(graph1))
            split2 = self.splitByStateFlags(parentDataIds.subset(graph2))
            (parentDataId,) = parentDataIds
            for lhs, rhs in itertools.product(split1.chain(), split2.chain()):
                unioned = lhs.union(rhs)
                with self.subTest(lhs=lhs, rhs=rhs, unioned=unioned):
                    self.assertEqual(unioned.graph, graph1.union(graph2))
                    self.assertEqual(unioned, parentDataId.subset(unioned.graph))
                    if unioned.hasFull():
                        self.assertEqual(unioned.subset(lhs.graph), lhs)
                        self.assertEqual(unioned.subset(rhs.graph), rhs)
                    if lhs.hasFull() and rhs.hasFull():
                        self.assertTrue(unioned.hasFull())
                    if lhs.graph >= unioned.graph and lhs.hasFull():
                        self.assertTrue(unioned.hasFull())
                        if lhs.hasRecords():
                            self.assertTrue(unioned.hasRecords())
                    if rhs.graph >= unioned.graph and rhs.hasFull():
                        self.assertTrue(unioned.hasFull())
                        if rhs.hasRecords():
                            self.assertTrue(unioned.hasRecords())
                    if lhs.graph.required | rhs.graph.required >= unioned.graph.dimensions:
                        self.assertTrue(unioned.hasFull())
                    if lhs.hasRecords() and rhs.hasRecords():
                        if lhs.graph.elements | rhs.graph.elements >= unioned.graph.elements:
                            self.assertTrue(unioned.hasRecords())

    def testRegions(self):
        """Test that data IDs for a few known dimensions have the expected
        regions.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"observation_regions"})
            self.assertEqual(dataId.region, dataId.records["visit"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit", "detector"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"observation_regions"})
            self.assertEqual(dataId.region, dataId.records["visit_detector_region"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["tract"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"skymap_regions"})
            self.assertEqual(dataId.region, dataId.records["tract"].region)
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["patch"])):
            self.assertIsNotNone(dataId.region)
            self.assertEqual(dataId.graph.spatial.names, {"skymap_regions"})
            self.assertEqual(dataId.region, dataId.records["patch"].region)

    def testTimespans(self):
        """Test that data IDs for a few known dimensions have the expected
        timespans.
        """
        for dataId in self.randomDataIds(n=4).subset(
                DimensionGraph(self.allDataIds.universe, names=["visit"])):
            self.assertIsNotNone(dataId.timespan)
            self.assertEqual(dataId.graph.temporal.names, {"observation_timespans"})
            self.assertEqual(dataId.timespan, dataId.records["visit"].timespan)

    def testIterableStatusFlags(self):
        """Test that DataCoordinateSet and DataCoordinateSequence compute
        their hasFull and hasRecords flags correctly from their elements.
        """
        dataIds = self.randomDataIds(n=10)
        split = self.splitByStateFlags(dataIds)
        for cls in (DataCoordinateSet, DataCoordinateSequence):
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasFull())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=True).hasRecords())
            self.assertTrue(cls(split.expanded, graph=dataIds.graph, check=False).hasRecords())
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=True).hasFull())
            self.assertTrue(cls(split.complete, graph=dataIds.graph, check=False).hasFull())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.complete, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.complete, graph=dataIds.graph, hasRecords=True, check=True)
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=True).hasFull(),
                             not dataIds.graph.implied)
            self.assertEqual(cls(split.minimal, graph=dataIds.graph, check=False).hasFull(),
                             not dataIds.graph.implied)
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=True).hasRecords())
            self.assertFalse(cls(split.minimal, graph=dataIds.graph, check=False).hasRecords())
            with self.assertRaises(ValueError):
                cls(split.minimal, graph=dataIds.graph, hasRecords=True, check=True)
            if dataIds.graph.implied:
                with self.assertRaises(ValueError):
                    cls(split.minimal, graph=dataIds.graph, hasFull=True, check=True)

    def testSetOperations(self):
        """Test for self-consistency across DataCoordinateSet's operations.
        """
        c = self.randomDataIds(n=10).toSet()
        a = self.randomDataIds(n=20).toSet() | c
        b = self.randomDataIds(n=20).toSet() | c
        # Make sure we don't have a particularly unlucky random seed, since
        # that would make a lot of this test uninteresting.
        self.assertNotEqual(a, b)
        self.assertGreater(len(a), 0)
        self.assertGreater(len(b), 0)
        # The rest of the tests should not depend on the random seed.
        self.assertEqual(a, a)
        self.assertNotEqual(a, a.toSequence())
        self.assertEqual(a, a.toSequence().toSet())
        self.assertEqual(a, a.toSequence().toSet())
        self.assertEqual(b, b)
        self.assertNotEqual(b, b.toSequence())
        self.assertEqual(b, b.toSequence().toSet())
        self.assertEqual(a & b, a.intersection(b))
        self.assertLessEqual(a & b, a)
        self.assertLessEqual(a & b, b)
        self.assertEqual(a | b, a.union(b))
        self.assertGreaterEqual(a | b, a)
        self.assertGreaterEqual(a | b, b)
        self.assertEqual(a - b, a.difference(b))
        self.assertLessEqual(a - b, a)
        self.assertLessEqual(b - a, b)
        self.assertEqual(a ^ b, a.symmetric_difference(b))
        self.assertGreaterEqual(a ^ b, (a | b) - (a & b))
if __name__ == "__main__":
    unittest.main()