Coverage for tests/test_quantum.py: 20%
Shortcuts on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import json
from typing import Tuple, Iterable
import unittest

from lsst.daf.butler import (Quantum,
                             DimensionUniverse,
                             NamedKeyDict,
                             StorageClass,
                             DatasetType,
                             DatasetRef,
                             SerializedQuantum,
                             DataCoordinate,
                             DimensionRecordsAccumulator)
from lsst.sphgeom import Circle
"""Tests for Quantum.
"""
class MockTask:
    """Placeholder class standing in for a task.

    Only its qualified name is used (to build a ``taskName`` string in the
    serialization tests); it carries no behavior.
    """
    pass
class QuantumTestCase(unittest.TestCase):
    """Tests for `Quantum` construction and (de)serialization."""

    def _buildFullQuantum(self, taskName, addRecords=False) -> Tuple[Quantum, Iterable[DatasetType]]:
        """Return a fully populated `Quantum` and the dataset types it uses.

        Parameters
        ----------
        taskName : `str`
            Fully-qualified name of the task to associate with the quantum.
        addRecords : `bool`, optional
            If `True`, expand the data IDs with dimension records so that
            serialization of records can be exercised.

        Returns
        -------
        quantum : `Quantum`
            Quantum with one init-input, two inputs, and two outputs.
        datasetTypes : `list` [ `DatasetType` ]
            The init, input, and output dataset types, in that order.
        """
        universe = DimensionUniverse()
        datasetTypeNameInit = "test_ds_init"
        datasetTypeNameInput = "test_ds_input"
        datasetTypeNameOutput = "test_ds_output"

        storageClass = StorageClass("testref_StructuredData")

        instrument = universe['instrument']
        instrumentRecord = instrument.RecordClass(name="test")

        band = universe['band']
        bandRecord = band.RecordClass(name="r")

        physical_filter = universe['physical_filter']
        physical_filter_record = physical_filter.RecordClass(name='r', instrument='test')

        visit_system = universe['visit_system']
        visit_system_record = visit_system.RecordClass(id=9, instrument='test', name='test_visit_system')

        visit = universe['visit']
        region = Circle()
        # Deterministic filler bytes for the record's ``hash`` field.
        hashed = hash(visit).to_bytes(30, 'little')
        visit_record_42 = visit.RecordClass(id=42, instrument='test', name='test_visit', region=region,
                                            hash=hashed)
        # Bug fix: this record was previously assigned to ``visit_record_42``
        # again, clobbering the id=42 record so both record maps below shared
        # the id=43 record.
        visit_record_43 = visit.RecordClass(id=43, instrument='test', name='test_visit', region=region,
                                            hash=hashed)

        records42 = {instrument: instrumentRecord, band: bandRecord,
                     physical_filter: physical_filter_record,
                     visit_system: visit_system_record, visit: visit_record_42}

        records43 = {instrument: instrumentRecord, band: bandRecord,
                     physical_filter: physical_filter_record,
                     visit_system: visit_system_record, visit: visit_record_43}

        dataId42 = DataCoordinate.standardize(dict(instrument='test', visit=42),  # type: ignore
                                              universe=universe)
        dataId43 = DataCoordinate.standardize(dict(instrument='test', visit=43),  # type: ignore
                                              universe=universe)

        if addRecords:
            dataId42 = dataId42.expanded(records42)  # type: ignore
            dataId43 = dataId43.expanded(records43)  # type: ignore

        datasetTypeInit = DatasetType(datasetTypeNameInit, universe.extract(("instrument", "visit")),
                                      storageClass)
        datasetTypeInput = DatasetType(datasetTypeNameInput, universe.extract(("instrument", "visit")),
                                      storageClass)
        datasetTypeOutput = DatasetType(datasetTypeNameOutput, universe.extract(("instrument", "visit")),
                                        storageClass)
        predictedInputs = {datasetTypeInput: [DatasetRef(datasetTypeInput, dataId42),
                                              DatasetRef(datasetTypeInput, dataId43)]}
        outputs = {datasetTypeOutput: [DatasetRef(datasetTypeOutput, dataId42),
                                       DatasetRef(datasetTypeOutput, dataId43)]}
        initInputs = {datasetTypeInit: DatasetRef(datasetTypeInit, dataId42)}

        return Quantum(taskName=taskName, inputs=predictedInputs, outputs=outputs,
                       initInputs=initInputs), [datasetTypeInit, datasetTypeInput, datasetTypeOutput]

    def testConstructor(self):
        """Test of constructor."""
        # Quantum-specific arguments; can't use a real PipelineTask due to
        # inverted package dependency.
        taskName = "some.task.object"

        quantum = Quantum(taskName=taskName)
        self.assertEqual(quantum.taskName, taskName)
        self.assertEqual(quantum.initInputs, {})
        self.assertEqual(quantum.inputs, NamedKeyDict())
        self.assertEqual(quantum.outputs, {})
        self.assertIsNone(quantum.dataId)

        quantum, (_, datasetTypeInput, datasetTypeOutput) = self._buildFullQuantum(taskName)
        self.assertEqual(len(quantum.inputs[datasetTypeInput]), 2)
        self.assertEqual(len(quantum.outputs[datasetTypeOutput]), 2)

    def testSerialization(self):
        """Test round-tripping a Quantum through its simple/JSON forms."""
        taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"
        # from simple w/o records
        quantum, _ = self._buildFullQuantum(taskName)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # from simple w/ records
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # verify direct works
        jsonVersion = json.loads(serialized.json())
        fromDirect = SerializedQuantum.direct(**jsonVersion)
        self.assertEqual(fromDirect, serialized)

        # verify direct with records works
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        jsonVersion = json.loads(serialized.json())
        fromDirect = SerializedQuantum.direct(**jsonVersion)
        self.assertEqual(fromDirect, serialized)

        # verify the simple accumulator works
        accumulator = DimensionRecordsAccumulator()
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple(accumulator)
        # verify the accumulator was populated
        recordMapping = accumulator.makeSerializedDimensionRecordMapping()
        self.assertGreater(len(recordMapping), 0)
        # verify the dimension records were not written out with the quantum
        self.assertEqual(serialized.dimensionRecords, None)
        # re-attach the externally accumulated records and round-trip
        serialized.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping()  # type: ignore
        self.assertEqual(quantum, quantum.from_simple(serialized, universe=DimensionUniverse()))
# Coverage-report annotation text that had been fused onto this line has been
# removed; this is the standard unittest script entry point.
if __name__ == "__main__":
    unittest.main()