Coverage for python/lsst/ctrl/mpexec/showInfo.py: 11%
174 statements
# This file is part of ctrl_mpexec.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ["ShowInfo"]

import fnmatch
import re
import sys
from collections import defaultdict
from types import SimpleNamespace
from typing import Any, Optional

import lsst.pex.config as pexConfig
import lsst.pex.config.history as pexConfigHistory
from lsst.daf.butler import DatasetRef
from lsst.pipe.base import Pipeline, QuantumGraph

from . import util
from .cmdLineFwk import _ButlerFactory


class _FilteredStream:
    """A file-like object that filters some config fields.

    Note
    ----
    This class depends on implementation details of the
    ``Config.saveToStream`` methods, in particular that each config field is
    saved with a single call to ``write()``, and that this call combines the
    comment string(s) for the field with the field path and value.
    This class will not work reliably on the "import" strings, so imports
    should be disabled by passing ``skipImports=True`` to ``saveToStream()``.
    """

    def __init__(self, pattern: str, stream: Any = None) -> None:
        if stream is None:
            stream = sys.stdout
        self.stream = stream
        # obey case if pattern isn't lowercase or requests NOIGNORECASE
        mat = re.search(r"(.*):NOIGNORECASE$", pattern)

        if mat:
            pattern = mat.group(1)
            self._pattern = re.compile(fnmatch.translate(pattern))
        else:
            if pattern != pattern.lower():
                print(
                    f'Matching "{pattern}" without regard to case ' "(append :NOIGNORECASE to prevent this)",
                    file=self.stream,
                )
            self._pattern = re.compile(fnmatch.translate(pattern), re.IGNORECASE)

    def write(self, showStr: str) -> None:
        # Strip off doc string line(s) and cut off at "=" for string matching
        matchStr = showStr.rstrip().split("\n")[-1].split("=")[0]
        if self._pattern.search(matchStr):
            self.stream.write(showStr)
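
# Illustrative example (not part of the original module; field names are
# hypothetical): a _FilteredStream only passes through those saveToStream()
# writes whose field path matches the glob pattern.
#
#     fs = _FilteredStream("*.doWrite")
#     fs.write("# Persist results?\nconfig.isr.doWrite=True\n")  # matches -> written
#     fs.write("config.isr.background.binSize=128\n")            # no match -> suppressed
#
# Appending ":NOIGNORECASE" to the pattern makes the match case-sensitive.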


class ShowInfo:
    """Show information about a pipeline or quantum graph.

    Parameters
    ----------
    show : `list` [`str`]
        A list of show commands, some of which may have additional parameters
        specified using an ``=``.
    stream : I/O stream or `None`
        The output stream to use. `None` will be treated as `sys.stdout`.

    Raises
    ------
    ValueError
        Raised if some show commands are not recognized.
    """

    pipeline_commands = {"pipeline", "config", "history", "tasks", "dump-config"}
    graph_commands = {"graph", "workflow", "uri"}

    def __init__(self, show: list[str], stream: Any = None) -> None:
        if stream is None:
            # Defer assigning sys.stdout to allow click to redefine it if
            # it wants. Assigning the default at class definition leads
            # to confusion on reassignment.
            stream = sys.stdout
        commands: dict[str, list[str]] = defaultdict(list)
        for value in show:
            command, _, args = value.partition("=")
            commands[command].append(args)
        self.commands = commands
        self.stream = stream
        self.handled: set[str] = set()

        known = self.pipeline_commands | self.graph_commands
        unknown = set(commands) - known
        if unknown:
            raise ValueError(f"Unknown value(s) for show: {unknown} (choose from '{', '.join(known)}')")
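
    # Illustrative example (not part of the original module; task labels are
    # hypothetical): each show string is split on its first "=" into a command
    # and an argument, and repeated commands accumulate their arguments.
    #
    #     info = ShowInfo(["pipeline", "config=isr::doWrite", "config=calibrate::*"])
    #     info.commands  # defaultdict: {"pipeline": [""], "config": ["isr::doWrite", "calibrate::*"]}
    #
    # A command outside pipeline_commands | graph_commands, e.g.
    # ShowInfo(["quanta"]), raises ValueError at construction time.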

    @property
    def unhandled(self) -> frozenset[str]:
        """Return the commands that have not yet been processed."""
        return frozenset(set(self.commands) - self.handled)

    def show_pipeline_info(self, pipeline: Pipeline) -> None:
        """Display useful information about the pipeline.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            The pipeline to use when reporting information.
        """
        for command in self.pipeline_commands:
            if command not in self.commands:
                continue
            args = self.commands[command]

            if command == "pipeline":
                print(pipeline, file=self.stream)
            elif command == "config":
                for arg in args:
                    self._showConfig(pipeline, arg, False)
            elif command == "dump-config":
                for arg in args:
                    self._showConfig(pipeline, arg, True)
            elif command == "history":
                for arg in args:
                    self._showConfigHistory(pipeline, arg)
            elif command == "tasks":
                self._showTaskHierarchy(pipeline)
            else:
                raise RuntimeError(f"Unexpectedly tried to process command {command!r}.")
            self.handled.add(command)

    def show_graph_info(self, graph: QuantumGraph, args: Optional[SimpleNamespace] = None) -> None:
        """Show information associated with this graph.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Graph to use when reporting information.
        args : `types.SimpleNamespace`, optional
            Parsed command-line parameters. Used to obtain additional external
            information such as the location of a usable Butler.
        """
        for command in self.graph_commands:
            if command not in self.commands:
                continue
            if command == "graph":
                self._showGraph(graph)
            elif command == "uri":
                if args is None:
                    raise ValueError("The uri option requires additional command line arguments.")
                self._showUri(graph, args)
            elif command == "workflow":
                self._showWorkflow(graph)
            else:
                raise RuntimeError(f"Unexpectedly tried to process command {command!r}.")
            self.handled.add(command)
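
    # Illustrative driver flow (a sketch, not part of the original module;
    # "pipeline", "qgraph" and "parsed_args" are hypothetical names): a
    # command-line front end would report on the pipeline before the graph is
    # built and on the graph afterwards, then check that nothing was left over.
    #
    #     show = ShowInfo(["pipeline", "graph", "uri"])
    #     show.show_pipeline_info(pipeline)          # handles "pipeline"
    #     show.show_graph_info(qgraph, parsed_args)  # handles "graph" and "uri"
    #     assert not show.unhandled                  # everything was processed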

    def _showConfig(self, pipeline: Pipeline, showArgs: str, dumpFullConfig: bool) -> None:
        """Show task configuration.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            Pipeline definition.
        showArgs : `str`
            Defines what to show.
        dumpFullConfig : `bool`
            If `True` then dump the complete task configuration with all
            imports.
        """
        stream: Any = self.stream
        if dumpFullConfig:
            # Task label can be given with this option
            taskName = showArgs
        else:
            # The argument can have form [TaskLabel::][pattern:NOIGNORECASE]
            matConfig = re.search(r"^(?:(\w+)::)?(?:config.)?(.+)?", showArgs)
            assert matConfig is not None, "regex always matches"
            taskName = matConfig.group(1)
            pattern = matConfig.group(2)
            if pattern:
                stream = _FilteredStream(pattern, stream=stream)

        tasks = util.filterTasks(pipeline, taskName)
        if not tasks:
            raise ValueError("Pipeline has no tasks named {}".format(taskName))

        for taskDef in tasks:
            print("### Configuration for task `{}'".format(taskDef.label), file=self.stream)
            taskDef.config.saveToStream(stream, root="config", skipImports=not dumpFullConfig)
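
    # Illustrative parses of the "config" argument (task labels and field
    # names below are hypothetical):
    #
    #     "isr::doWrite"            -> taskName="isr", pattern="doWrite"
    #     "config.doWrite"          -> taskName=None,  pattern="doWrite"
    #     "*.binSize:NOIGNORECASE"  -> case-sensitive glob handled by _FilteredStream
    #     ""                        -> no pattern; the full config is dumped unfiltered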

    def _showConfigHistory(self, pipeline: Pipeline, showArgs: str) -> None:
        """Show history for task configuration.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            Pipeline definition.
        showArgs : `str`
            Defines what to show.
        """
        taskName = None
        pattern = None
        matHistory = re.search(r"^(?:(\w+)::)?(?:config[.])?(.+)", showArgs)
        if matHistory:
            taskName = matHistory.group(1)
            pattern = matHistory.group(2)
        if not pattern:
            raise ValueError("Please provide a value with --show history (e.g. history=Task::param)")

        tasks = util.filterTasks(pipeline, taskName)
        if not tasks:
            raise ValueError(f"Pipeline has no tasks named {taskName}")

        found = False
        for taskDef in tasks:

            config = taskDef.config

            # Look for any matches in the config hierarchy for this name
            for nmatch, thisName in enumerate(fnmatch.filter(config.names(), pattern)):
                if nmatch > 0:
                    print("", file=self.stream)

                cpath, _, cname = thisName.rpartition(".")
                try:
                    if not cpath:
                        # looking for top-level field
                        hconfig = taskDef.config
                    else:
                        hconfig = eval("config." + cpath, {}, {"config": config})
                except AttributeError:
                    print(
                        f"Error: Unable to extract attribute {cpath} from task {taskDef.label}",
                        file=sys.stderr,
                    )
                    hconfig = None

                # Sometimes we end up with a non-Config so skip those
                if isinstance(hconfig, (pexConfig.Config, pexConfig.ConfigurableInstance)) and hasattr(
                    hconfig, cname
                ):
                    print(f"### Configuration field for task `{taskDef.label}'", file=self.stream)
                    print(pexConfigHistory.format(hconfig, cname), file=self.stream)
                    found = True

        if not found:
            raise ValueError(f"None of the tasks has a field matching {pattern}")
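
    # Illustrative example (hypothetical task label and field): the argument
    # "isr::overscan.fitType" is parsed into taskName="isr" and
    # pattern="overscan.fitType"; the pattern is then fnmatch'ed against
    # config.names(), so "isr::overscan.*" would print the history of every
    # field under that sub-config.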

    def _showTaskHierarchy(self, pipeline: Pipeline) -> None:
        """Print task hierarchy to stdout.

        Parameters
        ----------
        pipeline : `lsst.pipe.base.Pipeline`
            Pipeline definition.
        """
        for taskDef in pipeline.toExpandedPipeline():
            print("### Subtasks for task `{}'".format(taskDef.taskName), file=self.stream)

            for configName, taskName in util.subTaskIter(taskDef.config):
                print("{}: {}".format(configName, taskName), file=self.stream)

    def _showGraph(self, graph: QuantumGraph) -> None:
        """Print quanta information to stdout.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Execution graph.
        """
        for taskNode in graph.taskGraph:
            print(taskNode, file=self.stream)

            for iq, quantum in enumerate(graph.getQuantaForTask(taskNode)):
                print("  Quantum {}:".format(iq), file=self.stream)
                print("    inputs:", file=self.stream)
                for key, refs in quantum.inputs.items():
                    dataIds = ["DataId({})".format(ref.dataId) for ref in refs]
                    print("      {}: [{}]".format(key, ", ".join(dataIds)), file=self.stream)
                print("    outputs:", file=self.stream)
                for key, refs in quantum.outputs.items():
                    dataIds = ["DataId({})".format(ref.dataId) for ref in refs]
                    print("      {}: [{}]".format(key, ", ".join(dataIds)), file=self.stream)
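
    # The resulting report looks roughly like this (task, dataset type and
    # data ID values below are hypothetical):
    #
    #     <task node for label "isr">
    #       Quantum 0:
    #         inputs:
    #           raw: [DataId({instrument: 'HSC', detector: 0, exposure: 903334})]
    #         outputs:
    #           postISRCCD: [DataId({instrument: 'HSC', detector: 0, exposure: 903334})]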

    def _showWorkflow(self, graph: QuantumGraph) -> None:
        """Print quanta information and dependencies to stdout.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Execution graph.
        """
        for node in graph:
            print(f"Quantum {node.nodeId}: {node.taskDef.taskName}", file=self.stream)
            for parent in graph.determineInputsToQuantumNode(node):
                print(f"Parent Quantum {parent.nodeId} - Child Quantum {node.nodeId}", file=self.stream)

    def _showUri(self, graph: QuantumGraph, args: SimpleNamespace) -> None:
        """Print input and predicted output URIs to stdout.

        Parameters
        ----------
        graph : `lsst.pipe.base.QuantumGraph`
            Execution graph.
        args : `types.SimpleNamespace`
            Parsed command line.
        """

        def dumpURIs(thisRef: DatasetRef) -> None:
            primary, components = butler.getURIs(thisRef, predict=True, run="TBD")
            if primary:
                print(f"    {primary}", file=self.stream)
            else:
                print("    (disassembled artifact)", file=self.stream)
                for compName, compUri in components.items():
                    print(f"        {compName}: {compUri}", file=self.stream)

        butler = _ButlerFactory.makeReadButler(args)
        for node in graph:
            print(f"Quantum {node.nodeId}: {node.taskDef.taskName}", file=self.stream)
            print("  inputs:", file=self.stream)
            for key, refs in node.quantum.inputs.items():
                for ref in refs:
                    dumpURIs(ref)
            print("  outputs:", file=self.stream)
            for key, refs in node.quantum.outputs.items():
                for ref in refs:
                    dumpURIs(ref)
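
# Illustrative output of _showUri for one quantum (task names and paths are
# hypothetical).  Output URIs are predicted (predict=True, run="TBD") because
# the outputs may not exist yet:
#
#     Quantum 3: lsst.ip.isr.IsrTask
#       inputs:
#         file:///repo/raw/r/raw_903334_42.fits
#       outputs:
#         file:///repo/TBD/postISRCCD_903334_42.fits
#
# For a disassembled composite, the primary URI line is replaced by
# "(disassembled artifact)" followed by one line per component.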