Coverage for tests/test_quantum.py: 15%

76 statements  

« prev     ^ index     » next       coverage.py v7.4.1, created at 2024-02-01 11:20 +0000

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This software is dual licensed under the GNU General Public License and also 

10# under a 3-clause BSD license. Recipients may choose which of these licenses 

11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt, 

12# respectively. If you choose the GPL option then the following text applies 

13# (but note that there is still no warranty even if you opt for BSD instead): 

14# 

15# This program is free software: you can redistribute it and/or modify 

16# it under the terms of the GNU General Public License as published by 

17# the Free Software Foundation, either version 3 of the License, or 

18# (at your option) any later version. 

19# 

20# This program is distributed in the hope that it will be useful, 

21# but WITHOUT ANY WARRANTY; without even the implied warranty of 

22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

23# GNU General Public License for more details. 

24# 

25# You should have received a copy of the GNU General Public License 

26# along with this program. If not, see <http://www.gnu.org/licenses/>. 

27 

28import json 

29import unittest 

30from collections.abc import Iterable 

31 

32from lsst.daf.butler import ( 

33 DataCoordinate, 

34 DatasetRef, 

35 DatasetType, 

36 DimensionRecordsAccumulator, 

37 DimensionUniverse, 

38 NamedKeyDict, 

39 Quantum, 

40 SerializedQuantum, 

41 StorageClass, 

42) 

43from lsst.sphgeom import Circle 

44 

45"""Tests for Quantum. 

46""" 

47 

48 

class MockTask:
    """Mock task for testing.

    Stands in for a real PipelineTask (which cannot be imported here due to
    an inverted package dependency); the tests only use it to derive a fully
    qualified task name via ``__module__`` and ``__qualname__``.
    """

53 

54 

class QuantumTestCase(unittest.TestCase):
    """Test for Quantum."""

    def _buildFullQuantum(self, taskName, addRecords=False) -> tuple[Quantum, Iterable[DatasetType]]:
        """Build a fully populated Quantum for visits 42 and 43.

        Returns the quantum plus the three dataset types used for its
        init-input, input, and output, in that order.  When ``addRecords``
        is true the data IDs are expanded with dimension records.
        """
        universe = DimensionUniverse()

        storage_class = StorageClass("testref_StructuredData")

        # Dimension elements we need records for.
        instrument = universe["instrument"]
        band = universe["band"]
        physical_filter = universe["physical_filter"]
        visit_system = universe["visit_system"]
        visit = universe["visit"]

        region = Circle()
        # Records shared by both visits.
        common_records = {
            instrument: instrument.RecordClass(name="test"),
            band: band.RecordClass(name="r"),
            physical_filter: physical_filter.RecordClass(name="r", instrument="test", band="r"),
            visit_system: visit_system.RecordClass(id=9, instrument="test", name="test_visit_system"),
        }
        # create a synthetic value to mock as a visit hash
        visit_records = {
            visit_id: visit.RecordClass(
                id=visit_id,
                instrument="test",
                name="test_visit",
                physical_filter="r",
                region=region,
            )
            for visit_id in (42, 43)
        }

        data_ids = {
            visit_id: DataCoordinate.standardize(
                dict(instrument="test", visit=visit_id), universe=universe  # type: ignore
            )
            for visit_id in (42, 43)
        }
        if addRecords:
            data_ids = {
                visit_id: data_id.expanded(  # type: ignore
                    {**common_records, visit: visit_records[visit_id]}
                )
                for visit_id, data_id in data_ids.items()
            }

        dimensions = universe.conform(("instrument", "visit"))
        init_type = DatasetType("test_ds_init", dimensions, storage_class)
        input_type = DatasetType("test_ds_input", dimensions, storage_class)
        output_type = DatasetType("test_ds_output", dimensions, storage_class)

        predicted_inputs = {
            input_type: [
                DatasetRef(input_type, data_ids[42], run="input"),
                DatasetRef(input_type, data_ids[43], run="input"),
            ]
        }
        outputs = {
            output_type: [
                DatasetRef(output_type, data_ids[42], run="some_run"),
                DatasetRef(output_type, data_ids[43], run="other_run"),
            ]
        }
        init_inputs = {init_type: DatasetRef(init_type, data_ids[42], run="input_run")}

        quantum = Quantum(
            taskName=taskName, inputs=predicted_inputs, outputs=outputs, initInputs=init_inputs
        )
        return quantum, [init_type, input_type, output_type]

    def testConstructor(self):
        """Test of constructor."""
        # Quantum specific arguments
        name = "some.task.object"  # can't use a real PipelineTask due to inverted package dependency

        # A bare quantum has empty containers and no data ID.
        empty = Quantum(taskName=name)
        self.assertEqual(empty.taskName, name)
        self.assertEqual(empty.initInputs, {})
        self.assertEqual(empty.inputs, NamedKeyDict())
        self.assertEqual(empty.outputs, {})
        self.assertIsNone(empty.dataId)

        # A fully built quantum carries two refs per dataset type.
        full, (_, input_type, output_type) = self._buildFullQuantum(name)
        self.assertEqual(len(full.inputs[input_type]), 2)
        self.assertEqual(len(full.outputs[output_type]), 2)

    def testSerialization(self):
        """Round-trip a Quantum through its serialized forms."""
        task_label = f"{MockTask.__module__}.{MockTask.__qualname__}"

        # Simple round trip, no dimension records attached.
        quantum, _ = self._buildFullQuantum(task_label)
        simple = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(simple, DimensionUniverse()))

        # Simple round trip with dimension records attached.
        quantum, _ = self._buildFullQuantum(task_label, addRecords=True)
        simple = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(simple, DimensionUniverse()))

        # The ``direct`` constructor must reproduce the serialized model
        # from its own JSON dump.
        as_json = json.loads(simple.model_dump_json())
        self.assertEqual(SerializedQuantum.direct(**as_json), simple)

        # Same ``direct`` check on a freshly built with-records quantum.
        quantum, _ = self._buildFullQuantum(task_label, addRecords=True)
        simple = quantum.to_simple()
        as_json = json.loads(simple.model_dump_json())
        self.assertEqual(SerializedQuantum.direct(**as_json), simple)

        # With an accumulator the records are collected externally and are
        # not written into the serialized quantum itself.
        accumulator = DimensionRecordsAccumulator()
        quantum, _ = self._buildFullQuantum(task_label, addRecords=True)
        simple = quantum.to_simple(accumulator)
        # verify the accumulator was populated
        self.assertGreater(len(accumulator.makeSerializedDimensionRecordMapping()), 0)
        # verify the dimension records were not written out
        self.assertEqual(simple.dimensionRecords, None)
        simple.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping()  # type: ignore

        self.assertEqual(quantum, quantum.from_simple(simple, universe=DimensionUniverse()))

204 

205 

if __name__ == "__main__":
    # Allow running this file directly: ``python test_quantum.py``.
    unittest.main()