Coverage for tests/test_quantum.py: 20%
Shortcuts on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22import json
23from typing import Tuple, Iterable
24import unittest
26from lsst.daf.butler import (Quantum,
27 DimensionUniverse,
28 NamedKeyDict,
29 StorageClass,
30 DatasetType,
31 DatasetRef,
32 SerializedQuantum,
33 DataCoordinate,
34 DimensionRecordsAccumulator)
35from lsst.sphgeom import Circle
"""Tests for Quantum."""
class MockTask:
    """Placeholder task class; exists only so tests can form a fully
    qualified task name (``module.qualname``) without depending on a real
    PipelineTask.
    """
class QuantumTestCase(unittest.TestCase):
    """Tests for Quantum.
    """

    def _buildFullQuantum(self, taskName, addRecords=False) -> Tuple[Quantum, Iterable[DatasetType]]:
        """Build a Quantum populated with init-inputs, inputs, and outputs.

        Parameters
        ----------
        taskName : `str`
            Fully qualified task name to attach to the Quantum.
        addRecords : `bool`, optional
            If `True`, expand the data IDs with dimension records.

        Returns
        -------
        quantum : `Quantum`
            The constructed quantum.
        datasetTypes : `Iterable` [ `DatasetType` ]
            The init, input, and output dataset types, in that order.
        """
        universe = DimensionUniverse()
        datasetTypeNameInit = "test_ds_init"
        datasetTypeNameInput = "test_ds_input"
        datasetTypeNameOutput = "test_ds_output"

        storageClass = StorageClass("testref_StructuredData")

        instrument = universe['instrument']
        instrumentRecord = instrument.RecordClass(name="test")

        band = universe['band']
        bandRecord = band.RecordClass(name="r")

        physical_filter = universe['physical_filter']
        physical_filter_record = physical_filter.RecordClass(name='r', instrument='test')

        visit_system = universe['visit_system']
        visit_system_record = visit_system.RecordClass(id=9, instrument='test', name='test_visit_system')

        visit = universe['visit']
        region = Circle()
        # Two distinct synthetic visit records, one per data ID.
        # BUGFIX: the second record was previously assigned to
        # visit_record_42 as well, clobbering the id=42 record and leaving
        # both data IDs sharing the id=43 record.
        visit_record_42 = visit.RecordClass(id=42, instrument='test', name='test_visit', region=region,)
        visit_record_43 = visit.RecordClass(id=43, instrument='test', name='test_visit', region=region,)

        records42 = {instrument: instrumentRecord, band: bandRecord,
                     physical_filter: physical_filter_record,
                     visit_system: visit_system_record, visit: visit_record_42}

        # BUGFIX: use the id=43 visit record here (was visit_record_42).
        records43 = {instrument: instrumentRecord, band: bandRecord,
                     physical_filter: physical_filter_record,
                     visit_system: visit_system_record, visit: visit_record_43}

        dataId42 = DataCoordinate.standardize(dict(instrument='test', visit=42),  # type: ignore
                                              universe=universe)
        dataId43 = DataCoordinate.standardize(dict(instrument='test', visit=43),  # type: ignore
                                              universe=universe)

        if addRecords:
            # Attach the dimension records so serialization round-trips
            # exercise the record-carrying code paths.
            dataId42 = dataId42.expanded(records42)  # type: ignore
            dataId43 = dataId43.expanded(records43)  # type: ignore

        datasetTypeInit = DatasetType(datasetTypeNameInit, universe.extract(("instrument", "visit")),
                                      storageClass)
        datasetTypeInput = DatasetType(datasetTypeNameInput, universe.extract(("instrument", "visit")),
                                       storageClass)
        datasetTypeOutput = DatasetType(datasetTypeNameOutput, universe.extract(("instrument", "visit")),
                                        storageClass)
        predictedInputs = {datasetTypeInput: [DatasetRef(datasetTypeInput, dataId42),
                                              DatasetRef(datasetTypeInput, dataId43)]}
        outputs = {datasetTypeOutput: [DatasetRef(datasetTypeOutput, dataId42),
                                       DatasetRef(datasetTypeOutput, dataId43)]}
        initInputs = {datasetTypeInit: DatasetRef(datasetTypeInit, dataId42)}

        return Quantum(taskName=taskName, inputs=predictedInputs, outputs=outputs,
                       initInputs=initInputs), [datasetTypeInit, datasetTypeInput, datasetTypeOutput]
    def testConstructor(self):
        """Test of constructor.
        """
        # Quantum specific arguments
        taskName = "some.task.object"  # can't use a real PipelineTask due to inverted package dependency

        # A bare Quantum: every container attribute should be empty and
        # the data ID unset.
        quantum = Quantum(taskName=taskName)
        self.assertEqual(quantum.taskName, taskName)
        self.assertEqual(quantum.initInputs, {})
        self.assertEqual(quantum.inputs, NamedKeyDict())
        self.assertEqual(quantum.outputs, {})
        self.assertIsNone(quantum.dataId)

        # A fully populated Quantum: two refs per input/output dataset type.
        quantum, (_, datasetTypeInput, datasetTypeOutput) = self._buildFullQuantum(taskName)
        self.assertEqual(len(quantum.inputs[datasetTypeInput]), 2)
        self.assertEqual(len(quantum.outputs[datasetTypeOutput]), 2)
    def testSerialization(self):
        """Round-trip a Quantum through its simple (JSON-able) form,
        with and without dimension records, and via an external
        DimensionRecordsAccumulator.
        """
        taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"
        # from simple w/o records
        quantum, _ = self._buildFullQuantum(taskName)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # from simple w/ records
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # verify direct works
        jsonVersion = json.loads(serialized.json())
        fromDirect = SerializedQuantum.direct(**jsonVersion)
        self.assertEqual(fromDirect, serialized)

        # verify direct with records works
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        jsonVersion = json.loads(serialized.json())
        fromDirect = SerializedQuantum.direct(**jsonVersion)
        self.assertEqual(fromDirect, serialized)

        # verify the simple accumulator works
        accumulator = DimensionRecordsAccumulator()
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple(accumulator)
        # verify the accumulator was populated
        recordMapping = accumulator.makeSerializedDimensionRecordMapping()
        self.assertGreater(len(recordMapping), 0)
        # verify the dimension records were not written out
        self.assertEqual(serialized.dimensionRecords, None)
        # Re-attach the externally accumulated records so the round trip
        # below can resolve them.
        serialized.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping()  # type: ignore

        self.assertEqual(quantum, quantum.from_simple(serialized, universe=DimensionUniverse()))
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()