Coverage for tests/qg_test_utils.py: 54%

68 statements  

coverage.py v7.2.7, created at 2023-07-21 10:39 +0000

# This file is part of ctrl_bps.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""QuantumGraph-related utilities to support ctrl_bps testing."""

# Not actually running the Quantum, so there is no need to override the
# 'run' method.
# pylint: disable=abstract-method

# Many dummy classes for testing.
# pylint: disable=missing-class-docstring

import lsst.pipe.base.connectionTypes as cT
from lsst.daf.butler import Config, DataCoordinate, DatasetRef, DatasetType, DimensionUniverse, Quantum
from lsst.pex.config import Field
from lsst.pipe.base import PipelineTask, PipelineTaskConfig, PipelineTaskConnections, QuantumGraph, TaskDef
from lsst.utils.introspection import get_full_type_name

METADATA = {"D1": [1, 2, 3]}
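# METADATA is arbitrary test metadata; make_test_quantum_graph() below
# attaches it to the QuantumGraph it builds (metadata=METADATA), giving
# callers a known value to check against. The exact contents are not
# significant.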

# For each dummy task, create a Connections, Config, and PipelineTask.


class Dummy1Connections(PipelineTaskConnections, dimensions=("D1", "D2")):
    """Connections class used for tests."""

    initOutput = cT.InitOutput(name="Dummy1InitOutput", storageClass="ExposureF", doc="n/a")
    input = cT.Input(name="Dummy1Input", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))
    output = cT.Output(name="Dummy1Output", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))


class Dummy1Config(PipelineTaskConfig, pipelineConnections=Dummy1Connections):
    """Config class used for testing."""

    conf1 = Field(dtype=int, default=1, doc="dummy config")


class Dummy1PipelineTask(PipelineTask):
    """PipelineTask used for testing."""

    ConfigClass = Dummy1Config


class Dummy2Connections(PipelineTaskConnections, dimensions=("D1", "D2")):
    """Second connections class used for testing."""

    initInput = cT.InitInput(name="Dummy1InitOutput", storageClass="ExposureF", doc="n/a")
    initOutput = cT.InitOutput(name="Dummy2InitOutput", storageClass="ExposureF", doc="n/a")
    input = cT.Input(name="Dummy1Output", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))
    output = cT.Output(name="Dummy2Output", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))


class Dummy2Config(PipelineTaskConfig, pipelineConnections=Dummy2Connections):
    """Config class used for the second pipeline task."""

    conf1 = Field(dtype=int, default=1, doc="dummy config")


class Dummy2PipelineTask(PipelineTask):
    """Second test PipelineTask."""

    ConfigClass = Dummy2Config


class Dummy3Connections(PipelineTaskConnections, dimensions=("D1", "D2")):
    """Third connections class used for testing."""

    initInput = cT.InitInput(name="Dummy2InitOutput", storageClass="ExposureF", doc="n/a")
    initOutput = cT.InitOutput(name="Dummy3InitOutput", storageClass="ExposureF", doc="n/a")
    input = cT.Input(name="Dummy2Output", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))
    output = cT.Output(name="Dummy3Output", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))


class Dummy3Config(PipelineTaskConfig, pipelineConnections=Dummy3Connections):
    """Third config used for testing."""

    conf1 = Field(dtype=int, default=1, doc="dummy config")


class Dummy3PipelineTask(PipelineTask):
    """Third test PipelineTask."""

    ConfigClass = Dummy3Config


# Used to test that a Task that does not interact with the other Tasks still
# works fine in the graph.
class Dummy4Connections(PipelineTaskConnections, dimensions=("D1", "D2")):
    """Fourth connections class used for testing."""

    input = cT.Input(name="Dummy4Input", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))
    output = cT.Output(name="Dummy4Output", storageClass="ExposureF", doc="n/a", dimensions=("D1", "D2"))


class Dummy4Config(PipelineTaskConfig, pipelineConnections=Dummy4Connections):
    """Fourth config used for testing."""

    conf1 = Field(dtype=int, default=1, doc="dummy config")


class Dummy4PipelineTask(PipelineTask):
    """Fourth test PipelineTask."""

    ConfigClass = Dummy4Config
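# ---------------------------------------------------------------------------
# How the dummy tasks above chain together through their connection dataset
# names (T1 -> T2 -> T3 form a linear chain; T4 stands alone):
#
#   Dummy1Output     -> Dummy2Connections.input
#   Dummy2Output     -> Dummy3Connections.input
#   Dummy1InitOutput -> Dummy2Connections.initInput
#   Dummy2InitOutput -> Dummy3Connections.initInput
#
# make_test_quantum_graph() below relies on this naming to reuse the output
# DatasetRef of one quantum as the input DatasetRef of the next.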

def make_test_quantum_graph(run: str = "run"):
    """Create a QuantumGraph for unit tests.

    Parameters
    ----------
    run : `str`, optional
        Name of the RUN collection for output datasets.

    Returns
    -------
    qgraph : `lsst.pipe.base.QuantumGraph`
        A test QuantumGraph that looks like the following
        (T4, i.e. Dummy4PipelineTask, is completely independent).

        Numbers in parentheses are the values for the two dimensions
        (D1, D2).

        T1(1,2)    T1(3,4)    T4(1,2)    T4(3,4)
           |          |
        T2(1,2)    T2(3,4)
           |          |
        T3(1,2)    T3(3,4)
    """

    config = Config(
        {
            "version": 1,
            "skypix": {
                "common": "htm7",
                "htm": {
                    "class": "lsst.sphgeom.HtmPixelization",
                    "max_level": 24,
                },
            },
            "elements": {
                "D1": {
                    "keys": [
                        {
                            "name": "id",
                            "type": "int",
                        }
                    ],
                    "storage": {
                        "cls": "lsst.daf.butler.registry.dimensions.table.TableDimensionRecordStorage",
                    },
                },
                "D2": {
                    "keys": [
                        {
                            "name": "id",
                            "type": "int",
                        }
                    ],
                    "storage": {
                        "cls": "lsst.daf.butler.registry.dimensions.table.TableDimensionRecordStorage",
                    },
                },
            },
            "packers": {},
        }
    )
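    # The Config above defines a deliberately tiny dimension configuration:
    # just the two integer-keyed dimensions D1 and D2 used by the dummy
    # connections, instead of the full default Butler dimension universe.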

    universe = DimensionUniverse(config=config)
    # Need to make a mapping of TaskDef to a set of Quantum.
    quantum_map = {}
    tasks = []
    # Map to keep output/intermediate refs.
    intermediate_refs: dict[tuple[DatasetType, DataCoordinate], DatasetRef] = {}
    for task, label in (
        (Dummy1PipelineTask, "T1"),
        (Dummy2PipelineTask, "T2"),
        (Dummy3PipelineTask, "T3"),
        (Dummy4PipelineTask, "T4"),
    ):
        task_def = TaskDef(get_full_type_name(task), task.ConfigClass(), task, label)
        tasks.append(task_def)
        quantum_set = set()
        for dim1, dim2 in ((1, 2), (3, 4)):
            if task_def.connections.initInputs:
                init_init_ds_type = DatasetType(
                    task_def.connections.initInput.name,
                    (),
                    storageClass=task_def.connections.initInput.storageClass,
                    universe=universe,
                )
                init_refs = [DatasetRef(init_init_ds_type, DataCoordinate.makeEmpty(universe), run=run)]
            else:
                init_refs = None
            input_ds_type = DatasetType(
                task_def.connections.input.name,
                task_def.connections.input.dimensions,
                storageClass=task_def.connections.input.storageClass,
                universe=universe,
            )
            data_id = DataCoordinate.standardize({"D1": dim1, "D2": dim2}, universe=universe)
            if ref := intermediate_refs.get((input_ds_type, data_id)):
                input_refs = [ref]
            else:
                input_refs = [DatasetRef(input_ds_type, data_id, run=run)]
            output_ds_type = DatasetType(
                task_def.connections.output.name,
                task_def.connections.output.dimensions,
                storageClass=task_def.connections.output.storageClass,
                universe=universe,
            )
            ref = DatasetRef(output_ds_type, data_id, run=run)
            intermediate_refs[(output_ds_type, data_id)] = ref
            output_refs = [ref]
            quantum_set.add(
                Quantum(
                    taskName=task.__qualname__,
                    dataId=data_id,
                    taskClass=task,
                    initInputs=init_refs,
                    inputs={input_ds_type: input_refs},
                    outputs={output_ds_type: output_refs},
                )
            )
        quantum_map[task_def] = quantum_set
    qgraph = QuantumGraph(quantum_map, metadata=METADATA)

    return qgraph
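
# ---------------------------------------------------------------------------
# Minimal usage sketch (illustrative, not part of the tested module). A unit
# test would typically just build the graph and hand it to the ctrl_bps code
# under test. The assertions below assume that QuantumGraph supports len()
# for the quantum count and exposes the metadata passed at construction;
# treat them as a sketch of intent rather than a guaranteed API.
#
#     qgraph = make_test_quantum_graph(run="test_run")
#     assert len(qgraph) == 8                    # 4 dummy tasks x 2 data IDs
#     assert qgraph.metadata["D1"] == [1, 2, 3]  # METADATA defined above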