Coverage for tests/test_transform.py: 22% of 97 statements (coverage.py v7.3.2, 2023-10-19)

# This file is part of ctrl_bps.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This software is dual licensed under the GNU General Public License and also
# under a 3-clause BSD license. Recipients may choose which of these licenses
# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
# respectively. If you choose the GPL option then the following text applies
# (but note that there is still no warranty even if you opt for BSD instead):
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""Unit tests of transform.py"""
import dataclasses
import os
import shutil
import tempfile
import unittest

from cqg_test_utils import make_test_clustered_quantum_graph
from lsst.ctrl.bps import BPS_SEARCH_ORDER, BpsConfig, GenericWorkflowJob
from lsst.ctrl.bps.transform import _get_job_values, create_generic_workflow, create_generic_workflow_config

TESTDIR = os.path.abspath(os.path.dirname(__file__))


class TestCreateGenericWorkflowConfig(unittest.TestCase):
    """Tests of create_generic_workflow_config."""

    def testCreate(self):
        """Test successful creation of the config."""
        config = BpsConfig({"a": 1, "b": 2, "uniqProcName": "testCreate"})
        wf_config = create_generic_workflow_config(config, "/test/create/prefix")
        self.assertIsInstance(wf_config, BpsConfig)
        for key in config:
            self.assertEqual(wf_config[key], config[key])
        self.assertEqual(wf_config["workflowName"], "testCreate")
        self.assertEqual(wf_config["workflowPath"], "/test/create/prefix")


class TestCreateGenericWorkflow(unittest.TestCase):
    """Tests of create_generic_workflow."""

    def setUp(self):
        self.tmpdir = tempfile.mkdtemp(dir=TESTDIR)
        self.config = BpsConfig(
            {
                "runInit": True,
                "computeSite": "global",
                "runQuantumCommand": "gexe -q {qgraphFile} --qgraph-node-id {qgraphNodeId}",
                "clusterTemplate": "{D1}_{D2}",
                "cluster": {
                    "cl1": {"pipetasks": "T1, T2", "dimensions": "D1, D2"},
                    "cl2": {"pipetasks": "T3, T4", "dimensions": "D1, D2"},
                },
                "cloud": {
                    "cloud1": {"runQuantumCommand": "c1exe -q {qgraphFile} --qgraph-node-id {qgraphNodeId}"},
                    "cloud2": {"runQuantumCommand": "c2exe -q {qgraphFile} --qgraph-node-id {qgraphNodeId}"},
                },
                "site": {
                    "site1": {"runQuantumCommand": "s1exe -q {qgraphFile} --qgraph-node-id {qgraphNodeId}"},
                    "site2": {"runQuantumCommand": "s2exe -q {qgraphFile} --qgraph-node-id {qgraphNodeId}"},
                    "global": {"runQuantumCommand": "s3exe -q {qgraphFile} --qgraph-node-id {qgraphNodeId}"},
                },
                # Needed because transform assumes they exist
                "whenSaveJobQgraph": "NEVER",
                "executionButler": {"whenCreate": "SUBMIT", "whenMerge": "ALWAYS"},
            },
            BPS_SEARCH_ORDER,
        )
        _, self.cqg = make_test_clustered_quantum_graph(self.tmpdir)

    def tearDown(self):
        shutil.rmtree(self.tmpdir, ignore_errors=True)

    def testCreatingGenericWorkflowGlobal(self):
        """Test creating a GenericWorkflow with global settings."""
        config = BpsConfig(self.config)
        config["computeCloud"] = "cloud1"
        config["computeSite"] = "site2"
        config["queue"] = "global_queue"
        print(config)
        workflow = create_generic_workflow(config, self.cqg, "test_gw", self.tmpdir)
        for jname in workflow:
            gwjob = workflow.get_job(jname)
            print(gwjob)
            self.assertEqual(gwjob.compute_site, "site2")
            self.assertEqual(gwjob.compute_cloud, "cloud1")
            self.assertEqual(gwjob.executable.src_uri, "s2exe")
            self.assertEqual(gwjob.queue, "global_queue")
        final = workflow.get_final()
        self.assertEqual(final.compute_site, "site2")
        self.assertEqual(final.compute_cloud, "cloud1")
        self.assertEqual(final.queue, "global_queue")

    def testCreatingQuantumGraphMixed(self):
        """Test creating a GenericWorkflow with setting overrides."""
        config = BpsConfig(self.config)
        config[".cluster.cl1.computeCloud"] = "cloud2"
        config[".cluster.cl1.computeSite"] = "notthere"
        config[".cluster.cl2.computeSite"] = "site1"
        config[".executionButler.queue"] = "special_final_queue"
        config[".executionButler.computeSite"] = "special_site"
        config[".executionButler.computeCloud"] = "special_cloud"
        workflow = create_generic_workflow(config, self.cqg, "test_gw", self.tmpdir)
        for jname in workflow:
            gwjob = workflow.get_job(jname)
            print(gwjob)
            if jname.startswith("cl1"):
                self.assertEqual(gwjob.compute_site, "notthere")
                self.assertEqual(gwjob.compute_cloud, "cloud2")
                self.assertEqual(gwjob.executable.src_uri, "c2exe")
            elif jname.startswith("cl2"):
                self.assertEqual(gwjob.compute_site, "site1")
                self.assertIsNone(gwjob.compute_cloud)
                self.assertEqual(gwjob.executable.src_uri, "s1exe")
            elif jname.startswith("pipetask"):
                self.assertEqual(gwjob.compute_site, "global")
                self.assertIsNone(gwjob.compute_cloud)
                self.assertEqual(gwjob.executable.src_uri, "s3exe")
        final = workflow.get_final()
        self.assertEqual(final.compute_site, "special_site")
        self.assertEqual(final.compute_cloud, "special_cloud")
        self.assertEqual(final.queue, "special_final_queue")


class TestGetJobValues(unittest.TestCase):
    """Tests of _get_job_values."""

    def setUp(self):
        self.default_job = GenericWorkflowJob("default_job")

    def testGettingDefaults(self):
        """Test retrieving default values."""
        config = BpsConfig({})
        job_values = _get_job_values(config, {}, None)
        self.assertTrue(
            all(
                getattr(self.default_job, field.name) == job_values[field.name]
                for field in dataclasses.fields(self.default_job)
            )
        )

    def testEnablingMemoryScaling(self):
        """Test enabling the memory scaling mechanism."""
        config = BpsConfig({"memoryMultiplier": 2.0})
        job_values = _get_job_values(config, {}, None)
        self.assertAlmostEqual(job_values["memory_multiplier"], 2.0)
        self.assertEqual(job_values["number_of_retries"], 5)

    def testDisablingMemoryScaling(self):
        """Test disabling the memory scaling mechanism."""
        config = BpsConfig({"memoryMultiplier": 0.5})
        job_values = _get_job_values(config, {}, None)
        self.assertIsNone(job_values["memory_multiplier"])

    def testRetrievingCmdLine(self):
        """Test retrieving the command line."""
        cmd_line_key = "runQuantum"
        config = BpsConfig({cmd_line_key: "/path/to/foo bar.txt"})
        job_values = _get_job_values(config, {}, cmd_line_key)
        self.assertEqual(job_values["executable"].name, "foo")
        self.assertEqual(job_values["executable"].src_uri, "/path/to/foo")
        self.assertEqual(job_values["arguments"], "bar.txt")


if __name__ == "__main__":
    unittest.main()