Coverage for tests/test_quantum.py: 15%
76 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-06-23 09:30 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22import json
23import unittest
24from collections.abc import Iterable
26from lsst.daf.butler import (
27 DataCoordinate,
28 DatasetRef,
29 DatasetType,
30 DimensionRecordsAccumulator,
31 DimensionUniverse,
32 NamedKeyDict,
33 Quantum,
34 SerializedQuantum,
35 StorageClass,
36)
37from lsst.sphgeom import Circle
39"""Tests for Quantum.
40"""
class MockTask:
    """Minimal placeholder class.

    Exists only so tests can derive a fully qualified task name from
    ``__module__``/``__qualname__``; a real PipelineTask cannot be used
    here due to an inverted package dependency.
    """

    pass
class QuantumTestCase(unittest.TestCase):
    """Test for Quantum."""

    def _buildFullQuantum(self, taskName, addRecords=False) -> tuple[Quantum, Iterable[DatasetType]]:
        """Construct a fully populated Quantum and its dataset types.

        Parameters
        ----------
        taskName : `str`
            Fully qualified task name to attach to the quantum.
        addRecords : `bool`, optional
            If `True`, expand the data IDs with dimension records before
            building the dataset refs.

        Returns
        -------
        quantum : `Quantum`
            The constructed quantum.
        datasetTypes : `list` [`DatasetType`]
            The init, input, and output dataset types, in that order.
        """
        universe = DimensionUniverse()
        storageClass = StorageClass("testref_StructuredData")

        # Dimension elements used by the data IDs below.
        instrument = universe["instrument"]
        band = universe["band"]
        physical_filter = universe["physical_filter"]
        visit_system = universe["visit_system"]
        visit = universe["visit"]

        # Records shared by both data IDs; only the visit record differs.
        commonRecords = {
            instrument: instrument.RecordClass(name="test"),
            band: band.RecordClass(name="r"),
            physical_filter: physical_filter.RecordClass(name="r", instrument="test", band="r"),
            visit_system: visit_system.RecordClass(id=9, instrument="test", name="test_visit_system"),
        }

        # Synthetic region standing in for a real visit footprint.
        region = Circle()

        def _visitRecord(visit_id):
            # Per-visit record; everything but the id is fixed.
            return visit.RecordClass(
                id=visit_id,
                instrument="test",
                name="test_visit",
                physical_filter="r",
                region=region,
            )

        recordsByVisit = {
            42: {**commonRecords, visit: _visitRecord(42)},
            43: {**commonRecords, visit: _visitRecord(43)},
        }

        # Build a (possibly expanded) data ID for each visit.
        dataIds = {}
        for visit_id, records in recordsByVisit.items():
            dataId = DataCoordinate.standardize(
                {"instrument": "test", "visit": visit_id}, universe=universe  # type: ignore
            )
            if addRecords:
                dataId = dataId.expanded(records)  # type: ignore
            dataIds[visit_id] = dataId

        dimensions = universe.extract(("instrument", "visit"))
        datasetTypeInit = DatasetType("test_ds_init", dimensions, storageClass)
        datasetTypeInput = DatasetType("test_ds_input", dimensions, storageClass)
        datasetTypeOutput = DatasetType("test_ds_output", dimensions, storageClass)

        predictedInputs = {
            datasetTypeInput: [
                DatasetRef(datasetTypeInput, dataIds[42], run="input"),
                DatasetRef(datasetTypeInput, dataIds[43], run="input"),
            ]
        }
        outputs = {
            datasetTypeOutput: [
                DatasetRef(datasetTypeOutput, dataIds[42], run="some_run"),
                DatasetRef(datasetTypeOutput, dataIds[43], run="other_run"),
            ]
        }
        initInputs = {datasetTypeInit: DatasetRef(datasetTypeInit, dataIds[42], run="input_run")}

        quantum = Quantum(
            taskName=taskName, inputs=predictedInputs, outputs=outputs, initInputs=initInputs
        )
        return quantum, [datasetTypeInit, datasetTypeInput, datasetTypeOutput]

    def testConstructor(self):
        """Test of constructor."""
        # Can't use a real PipelineTask due to inverted package dependency.
        taskName = "some.task.object"

        # A quantum built with only a task name starts out empty.
        empty = Quantum(taskName=taskName)
        self.assertEqual(empty.taskName, taskName)
        self.assertEqual(empty.initInputs, {})
        self.assertEqual(empty.inputs, NamedKeyDict())
        self.assertEqual(empty.outputs, {})
        self.assertIsNone(empty.dataId)

        # A fully populated quantum carries the refs it was built with.
        full, (_, datasetTypeInput, datasetTypeOutput) = self._buildFullQuantum(taskName)
        self.assertEqual(len(full.inputs[datasetTypeInput]), 2)
        self.assertEqual(len(full.outputs[datasetTypeOutput]), 2)

    def testSerialization(self):
        """Round-trip a Quantum through its serialized forms."""
        taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"

        # Round trip via to_simple/from_simple, without dimension records.
        quantum, _ = self._buildFullQuantum(taskName)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # Round trip again with records attached.
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # verify direct works
        jsonVersion = json.loads(serialized.json())
        self.assertEqual(SerializedQuantum.direct(**jsonVersion), serialized)

        # verify direct with records works
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        jsonVersion = json.loads(serialized.json())
        self.assertEqual(SerializedQuantum.direct(**jsonVersion), serialized)

        # verify the simple accumulator works
        accumulator = DimensionRecordsAccumulator()
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple(accumulator)
        # The accumulator should have collected the records ...
        recordMapping = accumulator.makeSerializedDimensionRecordMapping()
        self.assertGreater(len(recordMapping), 0)
        # ... and the serialized quantum should not have written them out.
        self.assertIsNone(serialized.dimensionRecords)
        # Reattach the accumulated records and round-trip.
        serialized.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping()  # type: ignore
        self.assertEqual(quantum, quantum.from_simple(serialized, universe=DimensionUniverse()))
# Run the tests when this module is executed directly.
if __name__ == "__main__":
    unittest.main()