Coverage for tests/test_quantum.py: 15%
76 statements
« prev ^ index » next coverage.py v7.3.1, created at 2023-10-02 08:00 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28import json
29import unittest
30from collections.abc import Iterable
32from lsst.daf.butler import (
33 DataCoordinate,
34 DatasetRef,
35 DatasetType,
36 DimensionRecordsAccumulator,
37 DimensionUniverse,
38 NamedKeyDict,
39 Quantum,
40 SerializedQuantum,
41 StorageClass,
42)
43from lsst.sphgeom import Circle
45"""Tests for Quantum.
46"""
class MockTask:
    """Mock task for testing.

    Stands in for a real PipelineTask, which cannot be imported here
    (see the note in ``testConstructor`` about the inverted package
    dependency).
    """
class QuantumTestCase(unittest.TestCase):
    """Tests for `Quantum` construction and serialization."""

    def _buildFullQuantum(self, taskName, addRecords=False) -> tuple[Quantum, Iterable[DatasetType]]:
        """Build a fully populated `Quantum` for ``taskName``.

        Parameters
        ----------
        taskName : `str`
            Fully qualified task name to store in the quantum.
        addRecords : `bool`, optional
            If `True`, expand the data IDs with dimension records before
            attaching them to the dataset refs.

        Returns
        -------
        quantum : `Quantum`
            The constructed quantum.
        datasetTypes : `list` [ `DatasetType` ]
            The init, input, and output dataset types, in that order.
        """
        universe = DimensionUniverse()
        storageClass = StorageClass("testref_StructuredData")

        # Dimension elements referenced by the data IDs below.
        instrument = universe["instrument"]
        band = universe["band"]
        physical_filter = universe["physical_filter"]
        visit_system = universe["visit_system"]
        visit = universe["visit"]

        # Records shared by both data IDs.
        commonRecords = {
            instrument: instrument.RecordClass(name="test"),
            band: band.RecordClass(name="r"),
            physical_filter: physical_filter.RecordClass(name="r", instrument="test", band="r"),
            visit_system: visit_system.RecordClass(id=9, instrument="test", name="test_visit_system"),
        }

        # Synthetic visit records, one per visit ID, sharing one region;
        # these mock real visits for serialization tests.
        region = Circle()
        visitRecords = {
            visitId: visit.RecordClass(
                id=visitId,
                instrument="test",
                name="test_visit",
                physical_filter="r",
                region=region,
            )
            for visitId in (42, 43)
        }

        dataIds = {
            visitId: DataCoordinate.standardize(
                dict(instrument="test", visit=visitId), universe=universe  # type: ignore
            )
            for visitId in (42, 43)
        }
        if addRecords:
            # Attach the shared records plus the matching visit record.
            dataIds = {
                visitId: dataId.expanded({**commonRecords, visit: visitRecords[visitId]})  # type: ignore
                for visitId, dataId in dataIds.items()
            }

        dimensions = universe.extract(("instrument", "visit"))
        datasetTypeInit = DatasetType("test_ds_init", dimensions, storageClass)
        datasetTypeInput = DatasetType("test_ds_input", dimensions, storageClass)
        datasetTypeOutput = DatasetType("test_ds_output", dimensions, storageClass)

        predictedInputs = {
            datasetTypeInput: [
                DatasetRef(datasetTypeInput, dataIds[42], run="input"),
                DatasetRef(datasetTypeInput, dataIds[43], run="input"),
            ]
        }
        outputs = {
            datasetTypeOutput: [
                DatasetRef(datasetTypeOutput, dataIds[42], run="some_run"),
                DatasetRef(datasetTypeOutput, dataIds[43], run="other_run"),
            ]
        }
        initInputs = {datasetTypeInit: DatasetRef(datasetTypeInit, dataIds[42], run="input_run")}

        quantum = Quantum(taskName=taskName, inputs=predictedInputs, outputs=outputs, initInputs=initInputs)
        return quantum, [datasetTypeInit, datasetTypeInput, datasetTypeOutput]

    def testConstructor(self):
        """Test of constructor."""
        # Quantum specific arguments
        taskName = "some.task.object"  # can't use a real PipelineTask due to inverted package dependency

        # A quantum built with only a task name is otherwise empty.
        empty = Quantum(taskName=taskName)
        self.assertEqual(empty.taskName, taskName)
        self.assertEqual(empty.initInputs, {})
        self.assertEqual(empty.inputs, NamedKeyDict())
        self.assertEqual(empty.outputs, {})
        self.assertIsNone(empty.dataId)

        # A fully populated quantum carries two refs per connection.
        full, (_, inputType, outputType) = self._buildFullQuantum(taskName)
        self.assertEqual(len(full.inputs[inputType]), 2)
        self.assertEqual(len(full.outputs[outputType]), 2)

    def testSerialization(self):
        """Round-trip a Quantum through its simple and JSON forms."""
        taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"

        # Round trip without dimension records attached.
        quantum, _ = self._buildFullQuantum(taskName)
        simple = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(simple, DimensionUniverse()))

        # Round trip with expanded data IDs carrying records.
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        simple = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(simple, DimensionUniverse()))

        # Rebuilding via SerializedQuantum.direct from the JSON form must
        # reproduce the serialized object.
        asJson = json.loads(simple.json())
        self.assertEqual(SerializedQuantum.direct(**asJson), simple)

        # Same direct check against a freshly built quantum with records.
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        simple = quantum.to_simple()
        asJson = json.loads(simple.json())
        self.assertEqual(SerializedQuantum.direct(**asJson), simple)

        # Externalizing records through an accumulator populates the
        # accumulator and leaves the serialized quantum without them.
        accumulator = DimensionRecordsAccumulator()
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        simple = quantum.to_simple(accumulator)
        recordMapping = accumulator.makeSerializedDimensionRecordMapping()
        self.assertGreater(len(recordMapping), 0)
        self.assertEqual(simple.dimensionRecords, None)

        # Reattach the externalized records and verify a full round trip.
        simple.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping()  # type: ignore
        self.assertEqual(quantum, quantum.from_simple(simple, universe=DimensionUniverse()))
# Allow running this test module directly, e.g. ``python test_quantum.py``.
if __name__ == "__main__":
    unittest.main()