# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

"""Tests for Quantum."""

import json
import unittest
from collections.abc import Iterable

from lsst.daf.butler import (
    DataCoordinate,
    DatasetRef,
    DatasetType,
    DimensionRecordsAccumulator,
    DimensionUniverse,
    NamedKeyDict,
    Quantum,
    SerializedQuantum,
    StorageClass,
)
from lsst.sphgeom import Circle
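
# A Quantum bundles one anticipated execution of a task: the task's name,
# an optional data ID, and the predicted init-input, input, and output
# dataset refs.  (Orientation comment; see lsst.daf.butler.Quantum for the
# authoritative documentation.)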


class MockTask:
    """Mock task for testing."""


class QuantumTestCase(unittest.TestCase):
    """Test for Quantum."""

    def _buildFullQuantum(self, taskName, addRecords=False) -> tuple[Quantum, Iterable[DatasetType]]:
        """Build a Quantum with init-input, input, and output datasets,
        returning it along with the dataset types used to build it.
        """
        universe = DimensionUniverse()
        datasetTypeNameInit = "test_ds_init"
        datasetTypeNameInput = "test_ds_input"
        datasetTypeNameOutput = "test_ds_output"

        storageClass = StorageClass("testref_StructuredData")

        instrument = universe["instrument"]
        instrumentRecord = instrument.RecordClass(name="test")

        band = universe["band"]
        bandRecord = band.RecordClass(name="r")

        physical_filter = universe["physical_filter"]
        physical_filter_record = physical_filter.RecordClass(name="r", instrument="test", band="r")

        visit_system = universe["visit_system"]
        visit_system_record = visit_system.RecordClass(id=9, instrument="test", name="test_visit_system")

        visit = universe["visit"]
        region = Circle()

        # Two synthetic visit records (IDs 42 and 43) sharing a placeholder
        # region; they back the data IDs used throughout these tests.
        visit_record_42 = visit.RecordClass(
            id=42,
            instrument="test",
            name="test_visit",
            physical_filter="r",
            region=region,
        )
        visit_record_43 = visit.RecordClass(
            id=43,
            instrument="test",
            name="test_visit",
            physical_filter="r",
            region=region,
        )

        records42 = {
            instrument: instrumentRecord,
            band: bandRecord,
            physical_filter: physical_filter_record,
            visit_system: visit_system_record,
            visit: visit_record_42,
        }

        records43 = {
            instrument: instrumentRecord,
            band: bandRecord,
            physical_filter: physical_filter_record,
            visit_system: visit_system_record,
            visit: visit_record_43,
        }

        dataId42 = DataCoordinate.standardize(
            dict(instrument="test", visit=42), universe=universe  # type: ignore
        )
        dataId43 = DataCoordinate.standardize(
            dict(instrument="test", visit=43), universe=universe  # type: ignore
        )

        if addRecords:
            # Attach the dimension records so the data IDs are "expanded";
            # serialization can then write the records out with the quantum.
            dataId42 = dataId42.expanded(records42)  # type: ignore
            dataId43 = dataId43.expanded(records43)  # type: ignore

        datasetTypeInit = DatasetType(
            datasetTypeNameInit, universe.extract(("instrument", "visit")), storageClass
        )
        datasetTypeInput = DatasetType(
            datasetTypeNameInput, universe.extract(("instrument", "visit")), storageClass
        )
        datasetTypeOutput = DatasetType(
            datasetTypeNameOutput, universe.extract(("instrument", "visit")), storageClass
        )
        predictedInputs = {
            datasetTypeInput: [
                DatasetRef(datasetTypeInput, dataId42, run="input"),
                DatasetRef(datasetTypeInput, dataId43, run="input"),
            ]
        }
        outputs = {
            datasetTypeOutput: [
                DatasetRef(datasetTypeOutput, dataId42, run="some_run"),
                DatasetRef(datasetTypeOutput, dataId43, run="other_run"),
            ]
        }
        initInputs = {datasetTypeInit: DatasetRef(datasetTypeInit, dataId42, run="input_run")}

        return Quantum(taskName=taskName, inputs=predictedInputs, outputs=outputs, initInputs=initInputs), [
            datasetTypeInit,
            datasetTypeInput,
            datasetTypeOutput,
        ]

    def testConstructor(self):
        """Test the constructor."""
        # Quantum-specific arguments.
        taskName = "some.task.object"  # can't use a real PipelineTask due to inverted package dependency

        quantum = Quantum(taskName=taskName)
        self.assertEqual(quantum.taskName, taskName)
        self.assertEqual(quantum.initInputs, {})
        self.assertEqual(quantum.inputs, NamedKeyDict())
        self.assertEqual(quantum.outputs, {})
        self.assertIsNone(quantum.dataId)

        quantum, (_, datasetTypeInput, datasetTypeOutput) = self._buildFullQuantum(taskName)
        self.assertEqual(len(quantum.inputs[datasetTypeInput]), 2)
        self.assertEqual(len(quantum.outputs[datasetTypeOutput]), 2)

    def testSerialization(self):
        taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"
        # Round-trip through the simple form, without dimension records.
        quantum, _ = self._buildFullQuantum(taskName)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # Round-trip through the simple form, with dimension records.
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        self.assertEqual(quantum, quantum.from_simple(serialized, DimensionUniverse()))

        # Verify that direct construction from JSON works.
        jsonVersion = json.loads(serialized.json())
        fromDirect = SerializedQuantum.direct(**jsonVersion)
        self.assertEqual(fromDirect, serialized)
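        # (SerializedQuantum.direct is assumed to be a fast-path constructor
        # that skips per-field validation for JSON already known to be
        # well-formed, hence the check that it matches the validated form.)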

        # Verify that direct construction with records works.
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple()
        jsonVersion = json.loads(serialized.json())
        fromDirect = SerializedQuantum.direct(**jsonVersion)
        self.assertEqual(fromDirect, serialized)

        # Verify that the dimension record accumulator works.
        accumulator = DimensionRecordsAccumulator()
        quantum, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized = quantum.to_simple(accumulator)
        # The accumulator should have been populated...
        recordMapping = accumulator.makeSerializedDimensionRecordMapping()
        self.assertGreater(len(recordMapping), 0)
        # ...and the records should not have been written out inline.
        self.assertIsNone(serialized.dimensionRecords)
        serialized.dimensionRecords = accumulator.makeSerializedDimensionRecordMapping()  # type: ignore

        self.assertEqual(quantum, quantum.from_simple(serialized, universe=DimensionUniverse()))
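
    def testAccumulatorSharedRecords(self):
        # A hypothetical extra check (not in the original suite): a minimal
        # sketch of the assumed purpose of DimensionRecordsAccumulator,
        # namely accumulating the records for several quanta so they can be
        # stored once and re-attached to each serialized quantum on read.
        # Only calls already exercised above are used.
        taskName = f"{MockTask.__module__}.{MockTask.__qualname__}"
        accumulator = DimensionRecordsAccumulator()
        quantum1, _ = self._buildFullQuantum(taskName, addRecords=True)
        quantum2, _ = self._buildFullQuantum(taskName, addRecords=True)
        serialized1 = quantum1.to_simple(accumulator)
        serialized2 = quantum2.to_simple(accumulator)
        # One shared mapping holds the records referenced by both quanta.
        recordMapping = accumulator.makeSerializedDimensionRecordMapping()
        serialized1.dimensionRecords = recordMapping  # type: ignore
        serialized2.dimensionRecords = recordMapping  # type: ignore
        universe = DimensionUniverse()
        self.assertEqual(quantum1, quantum1.from_simple(serialized1, universe=universe))
        self.assertEqual(quantum2, quantum2.from_simple(serialized2, universe=universe))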


if __name__ == "__main__":
    unittest.main()