Coverage for python/lsst/ctrl/mpexec/showInfo.py: 11%

174 statements  

coverage.py v7.2.7, created at 2023-06-14 09:14 +0000

# This file is part of ctrl_mpexec.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["ShowInfo"]

import fnmatch
import re
import sys
from collections import defaultdict
from types import SimpleNamespace
from typing import Any

import lsst.pex.config as pexConfig
import lsst.pex.config.history as pexConfigHistory
from lsst.daf.butler import DatasetRef
from lsst.pipe.base import Pipeline, QuantumGraph

from . import util
from .cmdLineFwk import _ButlerFactory


class _FilteredStream:
    """A file-like object that filters some config fields.

    Notes
    -----
    This class depends on implementation details of the ``Config.saveToStream``
    methods, in particular that a single call to ``write()`` is used to save
    the information about a single config field, and that this call combines
    the comment string(s) for the field with the field path and value.
    This class will not work reliably on the "import" strings, so imports
    should be disabled by passing ``skipImports=True`` to ``saveToStream()``.
    """

    def __init__(self, pattern: str, stream: Any = None) -> None:
        if stream is None:
            stream = sys.stdout
        self.stream = stream
        # obey case if pattern isn't lowercase or requests NOIGNORECASE
        mat = re.search(r"(.*):NOIGNORECASE$", pattern)

        if mat:
            pattern = mat.group(1)
            self._pattern = re.compile(fnmatch.translate(pattern))
        else:
            if pattern != pattern.lower():
                print(
                    f'Matching "{pattern}" without regard to case (append :NOIGNORECASE to prevent this)',
                    file=self.stream,
                )
            self._pattern = re.compile(fnmatch.translate(pattern), re.IGNORECASE)

    def write(self, showStr: str) -> None:
        # Strip off doc string line(s) and cut off at "=" for string matching
        matchStr = showStr.rstrip().split("\n")[-1].split("=")[0]
        if self._pattern.search(matchStr):
            self.stream.write(showStr)
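
# Illustrative usage sketch for _FilteredStream (``SomeTaskConfig`` is a
# hypothetical ``lsst.pex.config.Config`` subclass): the instance is handed to
# ``saveToStream`` in place of a real stream so that only fields whose dotted
# path matches the glob pattern are echoed. Matching is case-insensitive
# unless the pattern ends in ``:NOIGNORECASE``.
#
#     import io
#
#     captured = io.StringIO()
#     filtered = _FilteredStream("*.doWrite*", stream=captured)
#     SomeTaskConfig().saveToStream(filtered, root="config", skipImports=True)
#     print(captured.getvalue())  # only lines for fields matching "*.doWrite*"
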

class ShowInfo:
    """Show information about a pipeline or quantum graph.

    Parameters
    ----------
    show : `list` [`str`]
        A list of show commands, some of which may have additional parameters
        specified using an ``=``.
    stream : I/O stream or None.
        The output stream to use. `None` will be treated as `sys.stdout`.

    Raises
    ------
    ValueError
        Raised if some show commands are not recognized.
    """

    pipeline_commands = {"pipeline", "config", "history", "tasks", "dump-config"}
    graph_commands = {"graph", "workflow", "uri"}

    def __init__(self, show: list[str], stream: Any = None) -> None:
        if stream is None:
            # Defer assigning sys.stdout to allow click to redefine it if
            # it wants. Assigning the default at class definition leads
            # to confusion on reassignment.
            stream = sys.stdout
        commands: dict[str, list[str]] = defaultdict(list)
        for value in show:
            command, _, args = value.partition("=")
            commands[command].append(args)
        self.commands = commands
        self.stream = stream
        self.handled: set[str] = set()

        known = self.pipeline_commands | self.graph_commands
        unknown = set(commands) - known
        if unknown:
            raise ValueError(f"Unknown value(s) for show: {unknown} (choose from '{', '.join(known)}')")
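
    # Illustrative sketch of the parsing done in __init__ above: each entry in
    # ``show`` is split on the first "=", the left side selects a command and
    # the right side (possibly empty) becomes one of that command's arguments.
    # The task label "isr" and the field globs are hypothetical.
    #
    #     info = ShowInfo(["pipeline", "config=isr::*", "config=*.doWrite*"])
    #     # info.commands == {"pipeline": [""],
    #     #                   "config": ["isr::*", "*.doWrite*"]}
    #     ShowInfo(["typo-command"])  # raises ValueError (not a known command)
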

    @property
    def unhandled(self) -> frozenset[str]:
        """Return the commands that have not yet been processed."""
        return frozenset(set(self.commands) - self.handled)

    def show_pipeline_info(self, pipeline: Pipeline) -> None:
        """Display useful information about the pipeline.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            The pipeline to use when reporting information.
        """
        for command in self.pipeline_commands:
            if command not in self.commands:
                continue
            args = self.commands[command]

            if command == "pipeline":
                print(pipeline, file=self.stream)
            elif command == "config":
                for arg in args:
                    self._showConfig(pipeline, arg, False)
            elif command == "dump-config":
                for arg in args:
                    self._showConfig(pipeline, arg, True)
            elif command == "history":
                for arg in args:
                    self._showConfigHistory(pipeline, arg)
            elif command == "tasks":
                self._showTaskHierarchy(pipeline)
            else:
                raise RuntimeError(f"Unexpectedly tried to process command {command!r}.")
            self.handled.add(command)

    def show_graph_info(self, graph: QuantumGraph, args: SimpleNamespace | None = None) -> None:
        """Show information associated with this graph.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Graph to use when reporting information.
        args : `types.SimpleNamespace`, optional
            Parsed command-line parameters. Used to obtain additional external
            information such as the location of a usable Butler.
        """
        for command in self.graph_commands:
            if command not in self.commands:
                continue
            if command == "graph":
                self._showGraph(graph)
            elif command == "uri":
                if args is None:
                    raise ValueError("The uri option requires additional command line arguments.")
                self._showUri(graph, args)
            elif command == "workflow":
                self._showWorkflow(graph)
            else:
                raise RuntimeError(f"Unexpectedly tried to process command {command!r}.")
            self.handled.add(command)
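
    # Illustrative driver pattern (``pipeline``, ``qgraph`` and the parsed
    # ``args`` namespace are hypothetical objects supplied by the caller):
    # pipeline-level commands are handled as soon as a Pipeline exists,
    # graph-level commands once a QuantumGraph has been built, and
    # ``unhandled`` reports anything requested but never processed.
    #
    #     info = ShowInfo(["tasks", "graph"])
    #     info.show_pipeline_info(pipeline)    # handles "tasks"
    #     info.show_graph_info(qgraph, args)   # handles "graph"
    #     if info.unhandled:
    #         print(f"Unhandled commands: {info.unhandled}")
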

    def _showConfig(self, pipeline: Pipeline, showArgs: str, dumpFullConfig: bool) -> None:
        """Show task configuration.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            Pipeline definition.
        showArgs : `str`
            Defines what to show.
        dumpFullConfig : `bool`
            If `True`, dump the complete task configuration with all imports.
        """
        stream: Any = self.stream
        if dumpFullConfig:
            # Task label can be given with this option
            taskName = showArgs
        else:
            # The argument can have form [TaskLabel::][pattern:NOIGNORECASE]
            matConfig = re.search(r"^(?:(\w+)::)?(?:config.)?(.+)?", showArgs)
            assert matConfig is not None, "regex always matches"
            taskName = matConfig.group(1)
            pattern = matConfig.group(2)
            if pattern:
                stream = _FilteredStream(pattern, stream=stream)

        tasks = util.filterTasks(pipeline, taskName)
        if not tasks:
            raise ValueError("Pipeline has no tasks named {}".format(taskName))

        for taskDef in tasks:
            print("### Configuration for task `{}'".format(taskDef.label), file=self.stream)
            taskDef.config.saveToStream(stream, root="config", skipImports=not dumpFullConfig)
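
    # Illustrative ``showArgs`` values accepted by _showConfig above for the
    # "config" command (the task label "isr" and the field glob are
    # hypothetical; an empty value means a bare "config" command was given):
    #
    #     ""                            -> every field of every task
    #     "isr::*"                      -> every field of the task labelled "isr"
    #     "*.doWrite*"                  -> matching fields in any task
    #     "isr::doWrite*:NOIGNORECASE"  -> case-sensitive match within one task
    #
    # For "dump-config" the whole value is treated as a task label, e.g. "isr".
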

    def _showConfigHistory(self, pipeline: Pipeline, showArgs: str) -> None:
        """Show history for task configuration.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            Pipeline definition
        showArgs : `str`
            Defines what to show
        """
        taskName = None
        pattern = None
        matHistory = re.search(r"^(?:(\w+)::)?(?:config[.])?(.+)", showArgs)
        if matHistory:
            taskName = matHistory.group(1)
            pattern = matHistory.group(2)
        if not pattern:
            raise ValueError("Please provide a value with --show history (e.g. history=Task::param)")

        tasks = util.filterTasks(pipeline, taskName)
        if not tasks:
            raise ValueError(f"Pipeline has no tasks named {taskName}")

        found = False
        for taskDef in tasks:
            config = taskDef.config

            # Look for any matches in the config hierarchy for this name
            for nmatch, thisName in enumerate(fnmatch.filter(config.names(), pattern)):
                if nmatch > 0:
                    print("", file=self.stream)

                cpath, _, cname = thisName.rpartition(".")
                try:
                    if not cpath:
                        # looking for top-level field
                        hconfig = taskDef.config
                    else:
                        hconfig = eval("config." + cpath, {}, {"config": config})
                except AttributeError:
                    print(
                        f"Error: Unable to extract attribute {cpath} from task {taskDef.label}",
                        file=sys.stderr,
                    )
                    hconfig = None

                # Sometimes we end up with a non-Config so skip those
                if isinstance(hconfig, (pexConfig.Config, pexConfig.ConfigurableInstance)) and hasattr(
                    hconfig, cname
                ):
                    print(f"### Configuration field for task `{taskDef.label}'", file=self.stream)
                    print(pexConfigHistory.format(hconfig, cname), file=self.stream)
                    found = True

        if not found:
            raise ValueError(f"None of the tasks has field matching {pattern}")
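
    # Illustrative walk-through of the attribute traversal above for one match
    # returned by fnmatch.filter (the field name "overscan.fitType" is
    # hypothetical):
    #
    #     thisName = "overscan.fitType"
    #     cpath, _, cname = thisName.rpartition(".")
    #     # cpath == "overscan", cname == "fitType"
    #     hconfig = eval("config." + cpath, {}, {"config": config})
    #     print(pexConfigHistory.format(hconfig, cname))  # field's history
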

    def _showTaskHierarchy(self, pipeline: Pipeline) -> None:
        """Print task hierarchy to stdout.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            Pipeline definition.
        """
        for taskDef in pipeline.toExpandedPipeline():
            print("### Subtasks for task `{}'".format(taskDef.taskName), file=self.stream)

            for configName, taskName in util.subTaskIter(taskDef.config):
                print("{}: {}".format(configName, taskName), file=self.stream)

    def _showGraph(self, graph: QuantumGraph) -> None:
        """Print quanta information to stdout.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Execution graph.
        """
        for taskNode in graph.taskGraph:
            print(taskNode, file=self.stream)

            for iq, quantum in enumerate(graph.getQuantaForTask(taskNode)):
                print("  Quantum {}:".format(iq), file=self.stream)
                print("    inputs:", file=self.stream)
                for key, refs in quantum.inputs.items():
                    dataIds = ["DataId({})".format(ref.dataId) for ref in refs]
                    print("      {}: [{}]".format(key, ", ".join(dataIds)), file=self.stream)
                print("    outputs:", file=self.stream)
                for key, refs in quantum.outputs.items():
                    dataIds = ["DataId({})".format(ref.dataId) for ref in refs]
                    print("      {}: [{}]".format(key, ", ".join(dataIds)), file=self.stream)

    def _showWorkflow(self, graph: QuantumGraph) -> None:
        """Print quanta information and dependencies to stdout.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Execution graph.
        """
        for node in graph:
            print(f"Quantum {node.nodeId}: {node.taskDef.taskName}", file=self.stream)
            for parent in graph.determineInputsToQuantumNode(node):
                print(f"Parent Quantum {parent.nodeId} - Child Quantum {node.nodeId}", file=self.stream)

    def _showUri(self, graph: QuantumGraph, args: SimpleNamespace) -> None:
        """Print input and predicted output URIs to stdout.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Execution graph.
        args : `types.SimpleNamespace`
            Parsed command line.
        """

        def dumpURIs(thisRef: DatasetRef) -> None:
            primary, components = butler.getURIs(thisRef, predict=True, run="TBD")
            if primary:
                print(f"    {primary}", file=self.stream)
            else:
                print("    (disassembled artifact)", file=self.stream)
                for compName, compUri in components.items():
                    print(f"        {compName}: {compUri}", file=self.stream)

        butler = _ButlerFactory.makeReadButler(args)
        for node in graph:
            print(f"Quantum {node.nodeId}: {node.taskDef.taskName}", file=self.stream)
            print("  inputs:", file=self.stream)
            for key, refs in node.quantum.inputs.items():
                for ref in refs:
                    dumpURIs(ref)
            print("  outputs:", file=self.stream)
            for key, refs in node.quantum.outputs.items():
                for ref in refs:
                    dumpURIs(ref)
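
# Illustrative use of the "uri" show command handled above (``qgraph`` and the
# parsed ``args`` namespace are hypothetical; "uri" is the only graph command
# that needs a butler, so invoking it without ``args`` raises):
#
#     info = ShowInfo(["uri"])
#     info.show_graph_info(qgraph)        # ValueError: needs command-line arguments
#     info.show_graph_info(qgraph, args)  # prints existing input URIs and
#                                         # predicted output URIs per quantum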