# Coverage for tests/test_quantum.py: 21%
# 78 statements
# coverage.py v6.4.1, created at 2022-06-09 09:43 +0000
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
import unittest
from typing import Iterable, Tuple

from lsst.daf.butler import (
    DataCoordinate,
    DatasetRef,
    DatasetType,
    DimensionRecordsAccumulator,
    DimensionUniverse,
    NamedKeyDict,
    Quantum,
    SerializedQuantum,
    StorageClass,
)
from lsst.sphgeom import Circle
"""Tests for Quantum.
"""
class MockTask:
    """Stand-in task class used only to supply a qualified task name.

    A real ``PipelineTask`` cannot be imported here because it would invert
    the package dependency between ``pipe_base`` and ``daf_butler``; the
    serialization test only needs ``__module__`` and ``__qualname__``.
    """

    pass
47class QuantumTestCase(unittest.TestCase):
48 """Test for Quantum."""
50 def _buildFullQuantum(self, taskName, addRecords=False) -> Tuple[Quantum, Iterable[DatasetType]]:
51 universe = DimensionUniverse()
52 datasetTypeNameInit = "test_ds_init"
53 datasetTypeNameInput = "test_ds_input"
54 datasetTypeNameOutput = "test_ds_output"
56 storageClass = StorageClass("testref_StructuredData")
58 instrument = universe["instrument"]
59 instrumentRecord = instrument.RecordClass(name="test")
61 band = universe["band"]
62 bandRecord = band.RecordClass(name="r")
64 physical_filter = universe["physical_filter"]
65 physical_filter_record = physical_filter.RecordClass(name="r", instrument="test", band="r")
67 visit_system = universe["visit_system"]
68 visit_system_record = visit_system.RecordClass(id=9, instrument="test", name="test_visit_system")
70 visit = universe["visit"]
71 region = Circle()
72 # create a synthetic value to mock as a visit hash
73 visit_record_42 = visit.RecordClass(
74 id=42,
75 instrument="test",
76 name="test_visit",
77 physical_filter="r",
78 region=region,
79 )
80 visit_record_43 = visit.RecordClass(
81 id=43,
82 instrument="test",
83 name="test_visit",
84 physical_filter="r",
85 region=region,
86 )
88 records42 = {
89 instrument: instrumentRecord,
90 band: bandRecord,
91 physical_filter: physical_filter_record,
92 visit_system: visit_system_record,
93 visit: visit_record_42,
94 }
96 records43 = {
97 instrument: instrumentRecord,
98 band: bandRecord,
99 physical_filter: physical_filter_record,
100 visit_system: visit_system_record,
101 visit: visit_record_43,
102 }
104 dataId42 = DataCoordinate.standardize(
105 dict(instrument="test", visit=42), universe=universe # type: ignore
106 )
107 dataId43 = DataCoordinate.standardize(
108 dict(instrument="test", visit=43), universe=universe # type: ignore
109 )
111 if addRecords:
112 dataId42 = dataId42.expanded(records42) # type: ignore
113 dataId43 = dataId43.expanded(records43) # type: ignore
115 datasetTypeInit = DatasetType(
116 datasetTypeNameInit, universe.extract(("instrument", "visit")), storageClass
117 )
118 datasetTypeInput = DatasetType(
119 datasetTypeNameInput, universe.extract(("instrument", "visit")), storageClass
120 )
121 datasetTypeOutput = DatasetType(
122 datasetTypeNameOutput, universe.extract(("instrument", "visit")), storageClass
123 )
124 predictedInputs = {
125 datasetTypeInput: [DatasetRef(datasetTypeInput, dataId42), DatasetRef(datasetTypeInput, dataId43)]
126 }
127 outputs = {
128 datasetTypeOutput: [
129 DatasetRef(datasetTypeOutput, dataId42),
130 DatasetRef(datasetTypeOutput, dataId43),
131 ]
132 }
133 initInputs = {datasetTypeInit: DatasetRef(datasetTypeInit, dataId42)}
135 return Quantum(taskName=taskName, inputs=predictedInputs, outputs=outputs, initInputs=initInputs), [
136 datasetTypeInit,
137 datasetTypeInput,
138 datasetTypeOutput,
139 ]
141 def testConstructor(self):
142 """Test of constructor."""
143 # Quantum specific arguments
144 taskName = "some.task.object" # can't use a real PipelineTask due to inverted package dependency
146 quantum = Quantum(taskName=taskName)
147 self.assertEqual(quantum.taskName, taskName)
148 self.assertEqual(quantum.initInputs, {})
149 self.assertEqual(quantum.inputs, NamedKeyDict())
150 self.assertEqual(quantum.outputs, {})
151 self.assertIsNone(quantum.dataId)
153 quantum, (_, datasetTypeInput, datasetTypeOutput) = self._buildFullQuantum(taskName)
154 self.assertEqual(len(quantum.inputs[datasetTypeInput]), 2)
155 self.assertEqual(len(quantum.outputs[datasetTypeOutput]), 2)
157 def testSerialization(self):
158 taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"
159 # from simple w/o records
160 quantum, _ = self._buildFullQuantum(taskName)
161 serialized = quantum.to_simple()
162 self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))
164 # from simple w/ records
165 quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
166 serialized = quantum.to_simple()
167 self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))
169 # verify direct works
170 jsonVersion = json.loads(serialized.json())
171 fromDirect = SerializedQuantum.direct(**jsonVersion)
172 self.assertEqual(fromDirect, serialized)
174 # verify direct with records works
175 quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
176 serialized = quantum.to_simple()
177 jsonVersion = json.loads(serialized.json())
178 fromDirect = SerializedQuantum.direct(**jsonVersion)
179 self.assertEqual(fromDirect, serialized)
181 # verify the simple accumulator works
182 accumulator = DimensionRecordsAccumulator()
183 quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
184 serialized = quantum.to_simple(accumulator)
185 # verify the accumulator was populated
186 recordMapping = accumulator.makeSerializedDimensionRecordMapping()
187 self.assertGreater(len(recordMapping), 0)
188 # verify the dimension records were not written out
189 self.assertEqual(serialized.dimensionRecords, None)
190 serialized.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping() # type: ignore
192 self.assertEqual(quantum, quantum.from_simple(serialized, universe=DimensionUniverse()))
if __name__ == "__main__":
    unittest.main()