Coverage for tests/test_quantum.py: 20%
Shortcuts on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <http://www.gnu.org/licenses/>.
22import json
23import unittest
24from typing import Iterable, Tuple
26from lsst.daf.butler import (
27 DataCoordinate,
28 DatasetRef,
29 DatasetType,
30 DimensionRecordsAccumulator,
31 DimensionUniverse,
32 NamedKeyDict,
33 Quantum,
34 SerializedQuantum,
35 StorageClass,
36)
37from lsst.sphgeom import Circle
39"""Tests for Quantum.
40"""
class MockTask:
    """Stand-in for a PipelineTask.

    Only ``__module__`` and ``__qualname__`` are read (to build a task-name
    string in ``testSerialization``); a real PipelineTask cannot be imported
    here due to the inverted package dependency.
    """

    pass
class QuantumTestCase(unittest.TestCase):
    """Test for Quantum."""

    def _buildFullQuantum(self, taskName, addRecords=False) -> Tuple[Quantum, Iterable[DatasetType]]:
        """Build a fully populated ``Quantum`` plus the dataset types it uses.

        Parameters
        ----------
        taskName : `str`
            Fully-qualified task name to store in the quantum.
        addRecords : `bool`, optional
            If `True`, expand the data IDs with dimension records.

        Returns
        -------
        quantum : `Quantum`
            Quantum with one init-input, two inputs, and two outputs.
        datasetTypes : `list` [`DatasetType`]
            The init, input, and output dataset types, in that order.
        """
        universe = DimensionUniverse()
        storage_class = StorageClass("testref_StructuredData")

        instrument = universe["instrument"]
        band = universe["band"]
        physical_filter = universe["physical_filter"]
        visit_system = universe["visit_system"]
        visit = universe["visit"]

        # Records shared by both visits; insertion order mirrors dimension
        # dependency order (instrument first, visit last).
        common_records = {
            instrument: instrument.RecordClass(name="test"),
            band: band.RecordClass(name="r"),
            physical_filter: physical_filter.RecordClass(name="r", instrument="test"),
            visit_system: visit_system.RecordClass(id=9, instrument="test", name="test_visit_system"),
        }
        # Both visits share one synthetic (empty) region.
        region = Circle()
        records_for = {
            visit_id: {
                **common_records,
                visit: visit.RecordClass(id=visit_id, instrument="test", name="test_visit", region=region),
            }
            for visit_id in (42, 43)
        }

        data_ids = {
            visit_id: DataCoordinate.standardize(
                dict(instrument="test", visit=visit_id), universe=universe  # type: ignore
            )
            for visit_id in (42, 43)
        }
        if addRecords:
            data_ids = {
                visit_id: data_id.expanded(records_for[visit_id])  # type: ignore
                for visit_id, data_id in data_ids.items()
            }
        dataId42 = data_ids[42]
        dataId43 = data_ids[43]

        # All three dataset types share the same dimensions and storage class.
        dims = universe.extract(("instrument", "visit"))
        datasetTypeInit = DatasetType("test_ds_init", dims, storage_class)
        datasetTypeInput = DatasetType("test_ds_input", dims, storage_class)
        datasetTypeOutput = DatasetType("test_ds_output", dims, storage_class)

        predicted_inputs = {
            datasetTypeInput: [
                DatasetRef(datasetTypeInput, dataId42),
                DatasetRef(datasetTypeInput, dataId43),
            ]
        }
        predicted_outputs = {
            datasetTypeOutput: [
                DatasetRef(datasetTypeOutput, dataId42),
                DatasetRef(datasetTypeOutput, dataId43),
            ]
        }
        init_inputs = {datasetTypeInit: DatasetRef(datasetTypeInit, dataId42)}

        quantum = Quantum(
            taskName=taskName, inputs=predicted_inputs, outputs=predicted_outputs, initInputs=init_inputs
        )
        return quantum, [datasetTypeInit, datasetTypeInput, datasetTypeOutput]

    def testConstructor(self):
        """Test of constructor."""
        # Can't use a real PipelineTask here due to inverted package
        # dependency, so a dotted path string stands in for one.
        taskName = "some.task.object"

        # A bare Quantum has empty containers and no data ID.
        empty = Quantum(taskName=taskName)
        self.assertEqual(empty.taskName, taskName)
        self.assertEqual(empty.initInputs, {})
        self.assertEqual(empty.inputs, NamedKeyDict())
        self.assertEqual(empty.outputs, {})
        self.assertIsNone(empty.dataId)

        # A fully built quantum carries two refs per input/output type.
        full, (_, input_type, output_type) = self._buildFullQuantum(taskName)
        self.assertEqual(len(full.inputs[input_type]), 2)
        self.assertEqual(len(full.outputs[output_type]), 2)

    def testSerialization(self):
        """Round-trip a Quantum through its simple/JSON representations."""
        taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"

        # Round-trip via the simple form, first without and then with
        # dimension records attached to the data IDs.
        for with_records in (False, True):
            quantum, _ = self._buildFullQuantum(taskName, addRecords=with_records)
            serialized = quantum.to_simple()
            self.assertEqual(quantum, Quantum.from_simple(serialized, DimensionUniverse()))

        # verify direct works (``serialized`` still holds the with-records form)
        jsonVersion = json.loads(serialized.json())
        self.assertEqual(SerializedQuantum.direct(**jsonVersion), serialized)

        # verify direct with records works
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        jsonVersion = json.loads(serialized.json())
        self.assertEqual(SerializedQuantum.direct(**jsonVersion), serialized)

        # verify the simple accumulator works
        accumulator = DimensionRecordsAccumulator()
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple(accumulator)
        # The accumulator must have been populated ...
        self.assertGreater(len(accumulator.makeSerializedDimensionRecordMapping()), 0)
        # ... and the records must therefore NOT be inlined in the output.
        self.assertEqual(serialized.dimensionRecords, None)
        # Re-attaching the accumulated records restores full fidelity.
        serialized.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping()  # type: ignore
        self.assertEqual(quantum, Quantum.from_simple(serialized, universe=DimensionUniverse()))
# Allow running this test module directly; the fused coverage-report
# annotation that corrupted this line has been removed.
if __name__ == "__main__":
    unittest.main()