# This file is part of ctrl_mpexec.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
22"""Module defining CmdLineFwk class and related methods.
23"""
25__all__ = ['CmdLineFwk']

# -------------------------------
# Imports of standard modules --
# -------------------------------
import argparse
import fnmatch
import logging
import re
import sys
from typing import Optional, Tuple
import warnings

# -----------------------------
# Imports for other modules --
# -----------------------------
from lsst.daf.butler import (
    Butler,
    CollectionSearch,
    CollectionType,
    Registry,
)
from lsst.daf.butler.registry import MissingCollectionError, RegistryDefaults
import lsst.pex.config as pexConfig
from lsst.pipe.base import GraphBuilder, Pipeline, QuantumGraph
from lsst.obs.base import Instrument
from .dotTools import graph2dot, pipeline2dot
from .executionGraphFixup import ExecutionGraphFixup
from .mpGraphExecutor import MPGraphExecutor
from .preExecInit import PreExecInit
from .singleQuantumExecutor import SingleQuantumExecutor
from . import util
from lsst.utils import doImport

# ----------------------------------
# Local non-exported definitions --
# ----------------------------------
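
# The logger name strips the first component of the module path, e.g.
# "lsst.ctrl.mpexec.cmdLineFwk" becomes "ctrl.mpexec.cmdLineFwk".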
_LOG = logging.getLogger(__name__.partition(".")[2])


class _OutputChainedCollectionInfo:
    """A helper class for handling command-line arguments related to an output
    `~lsst.daf.butler.CollectionType.CHAINED` collection.

    Parameters
    ----------
    registry : `lsst.daf.butler.Registry`
        Butler registry that collections will be added to and/or queried from.
    name : `str`
        Name of the collection given on the command line.
    """
    def __init__(self, registry: Registry, name: str):
        self.name = name
        try:
            self.chain = tuple(registry.getCollectionChain(name))
            self.exists = True
        except MissingCollectionError:
            self.chain = ()
            self.exists = False

    def __str__(self):
        return self.name

    name: str
    """Name of the collection provided on the command line (`str`).
    """

    exists: bool
    """Whether this collection already exists in the registry (`bool`).
    """

    chain: Tuple[str, ...]
    """The definition of the collection, if it already exists (`tuple` [`str`]).

    Empty if the collection does not already exist.
    """


class _OutputRunCollectionInfo:
    """A helper class for handling command-line arguments related to an output
    `~lsst.daf.butler.CollectionType.RUN` collection.

    Parameters
    ----------
    registry : `lsst.daf.butler.Registry`
        Butler registry that collections will be added to and/or queried from.
    name : `str`
        Name of the collection given on the command line.
    """
    def __init__(self, registry: Registry, name: str):
        self.name = name
        try:
            actualType = registry.getCollectionType(name)
            if actualType is not CollectionType.RUN:
                raise TypeError(f"Collection '{name}' exists but has type {actualType.name}, not RUN.")
            self.exists = True
        except MissingCollectionError:
            self.exists = False

    name: str
    """Name of the collection provided on the command line (`str`).
    """

    exists: bool
    """Whether this collection already exists in the registry (`bool`).
    """


class _ButlerFactory:
    """A helper class for processing command-line arguments related to input
    and output collections.

    Parameters
    ----------
    registry : `lsst.daf.butler.Registry`
        Butler registry that collections will be added to and/or queried from.

    args : `argparse.Namespace`
        Parsed command-line arguments. The following attributes are used,
        either at construction or in later methods.

        ``output``
            The name of a `~lsst.daf.butler.CollectionType.CHAINED`
            input/output collection.

        ``output_run``
            The name of a `~lsst.daf.butler.CollectionType.RUN` input/output
            collection.

        ``extend_run``
            A boolean indicating whether ``output_run`` should already exist
            and be extended.

        ``replace_run``
            A boolean indicating that (if `True`) ``output_run`` should already
            exist but will be removed from the output chained collection and
            replaced with a new one.

        ``prune_replaced``
            Whether and how to prune the replaced run (`None`, ``"unstore"``,
            or ``"purge"``); requires ``replace_run``.

        ``input``
            Input collections of any type; may be any expression handled by
            `lsst.daf.butler.registry.CollectionSearch.fromExpression`.

        ``butler_config``
            Path to a data repository root or configuration file.

    writeable : `bool`
        If `True`, a `Butler` is being initialized in a context where actual
        writes should happen, and hence an output run is necessary.

    Raises
    ------
    ValueError
        Raised if ``writeable is True`` but there are no output collections.
    """
    def __init__(self, registry: Registry, args: argparse.Namespace, writeable: bool):
        if args.output is not None:
            self.output = _OutputChainedCollectionInfo(registry, args.output)
        else:
            self.output = None
        if args.output_run is not None:
            self.outputRun = _OutputRunCollectionInfo(registry, args.output_run)
        elif self.output is not None:
            if args.extend_run:
                runName = self.output.chain[0]
            else:
                runName = "{}/{}".format(self.output, Instrument.makeCollectionTimestamp())
            self.outputRun = _OutputRunCollectionInfo(registry, runName)
        elif not writeable:
            # If we're not writing yet, ok to have no output run.
            self.outputRun = None
        else:
            raise ValueError("Cannot write without at least one of (--output, --output-run).")
        self.inputs = tuple(CollectionSearch.fromExpression(args.input)) if args.input else ()
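
    # Illustrative example (hypothetical names): with ``--output u/user/coadds``
    # and no ``--output-run``, the run name is derived from the chained
    # collection plus a timestamp, e.g. "u/user/coadds/20210101T120000Z";
    # with ``--extend-run`` the first child of the existing chain is reused.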

    def check(self, args: argparse.Namespace):
        """Check command-line options for consistency with each other and the
        data repository.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command-line arguments. See class documentation for the
            construction parameter of the same name.
        """
        assert not (args.extend_run and args.replace_run), "In mutually-exclusive group in ArgumentParser."
        if self.inputs and self.output is not None and self.output.exists:
            raise ValueError("Cannot use --inputs with an existing --output collection.")
        if args.extend_run and self.outputRun is None:
            raise ValueError("Cannot --extend-run when no output collection is given.")
        if args.extend_run and not self.outputRun.exists:
            raise ValueError(f"Cannot --extend-run; output collection "
                             f"'{self.outputRun.name}' does not exist.")
        if not args.extend_run and self.outputRun is not None and self.outputRun.exists:
            raise ValueError(f"Output run '{self.outputRun.name}' already exists, but "
                             f"--extend-run was not given.")
        if args.prune_replaced and not args.replace_run:
            raise ValueError("--prune-replaced requires --replace-run.")
        if args.replace_run and (self.output is None or not self.output.exists):
            raise ValueError("--output must point to an existing CHAINED collection for --replace-run.")

    @classmethod
    def _makeReadParts(cls, args: argparse.Namespace):
        """Common implementation for `makeReadButler` and
        `makeRegistryAndCollections`.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command-line arguments. See class documentation for the
            construction parameter of the same name.

        Returns
        -------
        butler : `lsst.daf.butler.Butler`
            A read-only butler constructed from the repo at
            ``args.butler_config``, but with no default collections.
        inputs : `lsst.daf.butler.registry.CollectionSearch`
            A collection search path constructed according to ``args``.
        self : `_ButlerFactory`
            A new `_ButlerFactory` instance representing the processed version
            of ``args``.
        """
        butler = Butler(args.butler_config, writeable=False)
        self = cls(butler.registry, args, writeable=False)
        self.check(args)
        if self.output and self.output.exists:
            if args.replace_run:
                replaced = self.output.chain[0]
                inputs = self.output.chain[1:]
                _LOG.debug("Simulating collection search in '%s' after removing '%s'.",
                           self.output.name, replaced)
            else:
                inputs = [self.output.name]
        else:
            inputs = list(self.inputs)
        if args.extend_run:
            inputs.insert(0, self.outputRun.name)
        inputs = CollectionSearch.fromExpression(inputs)
        return butler, inputs, self

    @classmethod
    def makeReadButler(cls, args: argparse.Namespace) -> Butler:
        """Construct a read-only butler according to the given command-line
        arguments.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command-line arguments. See class documentation for the
            construction parameter of the same name.

        Returns
        -------
        butler : `lsst.daf.butler.Butler`
            A read-only butler initialized with the collections specified by
            ``args``.
        """
        butler, inputs, _ = cls._makeReadParts(args)
        _LOG.debug("Preparing butler to read from %s.", inputs)
        return Butler(butler=butler, collections=inputs)

    @classmethod
    def makeRegistryAndCollections(cls, args: argparse.Namespace) -> \
            Tuple[Registry, CollectionSearch, Optional[str]]:
        """Return a read-only registry, a collection search path, and the name
        of the run to be used for future writes.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command-line arguments. See class documentation for the
            construction parameter of the same name.

        Returns
        -------
        registry : `lsst.daf.butler.Registry`
            Butler registry that collections will be added to and/or queried
            from.
        inputs : `lsst.daf.butler.registry.CollectionSearch`
            Collections to search for datasets.
        run : `str` or `None`
            Name of the output `~lsst.daf.butler.CollectionType.RUN` collection
            if it already exists, or `None` if it does not.
        """
        butler, inputs, self = cls._makeReadParts(args)
        run = self.outputRun.name if args.extend_run else None
        _LOG.debug("Preparing registry to read from %s and expect future writes to '%s'.", inputs, run)
        return butler.registry, inputs, run

    @classmethod
    def makeWriteButler(cls, args: argparse.Namespace) -> Butler:
        """Return a read-write butler initialized to write to and read from
        the collections specified by the given command-line arguments.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command-line arguments. See class documentation for the
            construction parameter of the same name.

        Returns
        -------
        butler : `lsst.daf.butler.Butler`
            A read-write butler initialized according to the given arguments.
        """
        butler = Butler(args.butler_config, writeable=True)
        self = cls(butler.registry, args, writeable=True)
        self.check(args)
        if self.output is not None:
            chainDefinition = list(self.output.chain if self.output.exists else self.inputs)
            if args.replace_run:
                replaced = chainDefinition.pop(0)
                if args.prune_replaced == "unstore":
                    # Remove datasets from the datastore only.
                    with butler.transaction():
                        refs = butler.registry.queryDatasets(..., collections=replaced)
                        butler.pruneDatasets(refs, unstore=True, run=replaced, disassociate=False)
                elif args.prune_replaced == "purge":
                    # Erase the entire collection and all its datasets; the
                    # replaced run must be removed from the chain first.
                    with butler.transaction():
                        butler.registry.setCollectionChain(self.output.name, chainDefinition)
                        butler.pruneCollection(replaced, purge=True, unstore=True)
                elif args.prune_replaced is not None:
                    raise NotImplementedError(
                        f"Unsupported --prune-replaced option '{args.prune_replaced}'."
                    )
            if not self.output.exists:
                butler.registry.registerCollection(self.output.name, CollectionType.CHAINED)
            if not args.extend_run:
                butler.registry.registerCollection(self.outputRun.name, CollectionType.RUN)
                chainDefinition.insert(0, self.outputRun.name)
                butler.registry.setCollectionChain(self.output.name, chainDefinition)
            _LOG.debug("Preparing butler to write to '%s' and read from '%s'=%s",
                       self.outputRun.name, self.output.name, chainDefinition)
            butler.registry.defaults = RegistryDefaults(run=self.outputRun.name, collections=self.output.name)
        else:
            inputs = CollectionSearch.fromExpression((self.outputRun.name,) + self.inputs)
            _LOG.debug("Preparing butler to write to '%s' and read from %s.", self.outputRun.name, inputs)
            butler.registry.defaults = RegistryDefaults(run=self.outputRun.name, collections=inputs)
        return butler
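
    # Illustrative sketch (hypothetical names): if chain "u/user/coadds" is
    # currently ["u/user/coadds/run1", "raw"], then --replace-run pops
    # "u/user/coadds/run1" and a fresh timestamped run is prepended, giving
    # ["u/user/coadds/run2", "raw"]; --prune-replaced additionally unstores
    # or purges the datasets of the popped run.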

    output: Optional[_OutputChainedCollectionInfo]
    """Information about the output chained collection, if there is or will be
    one (`_OutputChainedCollectionInfo` or `None`).
    """

    outputRun: Optional[_OutputRunCollectionInfo]
    """Information about the output run collection, if there is or will be
    one (`_OutputRunCollectionInfo` or `None`).
    """

    inputs: Tuple[str, ...]
    """Input collections provided directly by the user (`tuple` [ `str` ]).
    """


class _FilteredStream:
    """A file-like object that filters some config fields.

    Note
    ----
    This class depends on implementation details of ``Config.saveToStream``
    methods, in particular that that method uses a single call to ``write()``
    to save information about a single config field, and that this call
    combines the comment string(s) for a field with the field path and value.
    This class will not work reliably on the "import" strings, so imports
    should be disabled by passing ``skipImports=True`` to ``saveToStream()``.
    """
    def __init__(self, pattern):
        # obey case if pattern isn't lowercase or requests NOIGNORECASE
        mat = re.search(r"(.*):NOIGNORECASE$", pattern)

        if mat:
            pattern = mat.group(1)
            self._pattern = re.compile(fnmatch.translate(pattern))
        else:
            if pattern != pattern.lower():
                print(f"Matching \"{pattern}\" without regard to case "
                      "(append :NOIGNORECASE to prevent this)", file=sys.stdout)
            self._pattern = re.compile(fnmatch.translate(pattern), re.IGNORECASE)

    def write(self, showStr):
        # Strip off doc string line(s) and cut off at "=" for string matching
        matchStr = showStr.rstrip().split("\n")[-1].split("=")[0]
        if self._pattern.search(matchStr):
            sys.stdout.write(showStr)
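
# Example (illustrative only, not executed): a _FilteredStream passed to
# ``Config.saveToStream`` keeps only fields whose dotted path matches the
# glob pattern, e.g.
#
#   stream = _FilteredStream("*doWrite*")  # case-insensitive glob
#   stream.write("# Whether to write\nconfig.calibrate.doWrite=True\n")  # shown
#   stream.write("# Apply dark frame\nconfig.isr.doDark=False\n")  # filtered out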

# ------------------------
# Exported definitions --
# ------------------------


class CmdLineFwk:
    """PipelineTask framework which executes tasks from the command line.

    In addition to executing tasks, this activator provides additional methods
    for task management, such as dumping the configuration or the execution
    chain.
    """

    MP_TIMEOUT = 9999  # Default timeout (sec) for multiprocessing

    def __init__(self):
        pass

    def makePipeline(self, args):
        """Build a pipeline from command line arguments.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command line

        Returns
        -------
        pipeline : `~lsst.pipe.base.Pipeline`
        """
        if args.pipeline:
            pipeline = Pipeline.fromFile(args.pipeline)
        else:
            pipeline = Pipeline("anonymous")

        # loop over all pipeline actions and apply them in order
        for action in args.pipeline_actions:
            if action.action == "add_instrument":
                pipeline.addInstrument(action.value)
            elif action.action == "new_task":
                pipeline.addTask(action.value, action.label)
            elif action.action == "delete_task":
                pipeline.removeTask(action.label)
            elif action.action == "config":
                # action value string is "field=value", split it at '='
                field, _, value = action.value.partition("=")
                pipeline.addConfigOverride(action.label, field, value)
            elif action.action == "configfile":
                pipeline.addConfigFile(action.label, action.value)
            else:
                raise ValueError(f"Unexpected pipeline action: {action.action}")

        if args.save_pipeline:
            pipeline.toFile(args.save_pipeline)

        if args.pipeline_dot:
            pipeline2dot(pipeline, args.pipeline_dot)

        return pipeline
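
    # Illustrative example (hypothetical values): adding a task and overriding
    # one of its config fields would arrive here as a "new_task" action
    # (value="lsst.ip.isr.IsrTask", label="isr") followed by a "config" action
    # (label="isr", value="doDark=False").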

    def makeGraph(self, pipeline, args):
        """Build a graph from command line arguments.

        Parameters
        ----------
        pipeline : `~lsst.pipe.base.Pipeline`
            Pipeline, can be empty or ``None`` if graph is read from a file.
        args : `argparse.Namespace`
            Parsed command line

        Returns
        -------
        graph : `~lsst.pipe.base.QuantumGraph` or `None`
            If the resulting graph is empty then `None` is returned.
        """
        registry, collections, run = _ButlerFactory.makeRegistryAndCollections(args)

        if args.qgraph:
            # click passes an empty tuple as the default value for qgraph_node_id
            nodes = args.qgraph_node_id or None
            qgraph = QuantumGraph.loadUri(args.qgraph, registry.dimensions,
                                          nodes=nodes, graphID=args.qgraph_id)

            # pipeline cannot be provided in this case
            if pipeline:
                raise ValueError("Pipeline must not be given when quantum graph is read from file.")
        else:
            # make execution plan (a.k.a. DAG) for pipeline
            graphBuilder = GraphBuilder(registry,
                                        skipExisting=args.skip_existing)
            qgraph = graphBuilder.makeGraph(pipeline, collections, run, args.data_query)

        # count quanta in the graph; warn and return None if it is empty
        nQuanta = len(qgraph)
        if nQuanta == 0:
            warnings.warn("QuantumGraph is empty", stacklevel=2)
            return None
        else:
            _LOG.info("QuantumGraph contains %d quanta for %d tasks, graph ID: %r",
                      nQuanta, len(qgraph.taskGraph), qgraph.graphID)

        if args.save_qgraph:
            qgraph.saveUri(args.save_qgraph)

        if args.save_single_quanta:
            for quantumNode in qgraph:
                sqgraph = qgraph.subset(quantumNode)
                uri = args.save_single_quanta.format(quantumNode.nodeId.number)
                sqgraph.saveUri(uri)

        if args.qgraph_dot:
            graph2dot(qgraph, args.qgraph_dot)

        return qgraph

    def runPipeline(self, graph, taskFactory, args, butler=None):
        """Execute complete QuantumGraph.

        Parameters
        ----------
        graph : `QuantumGraph`
            Execution graph.
        taskFactory : `~lsst.pipe.base.TaskFactory`
            Task factory
        args : `argparse.Namespace`
            Parsed command line
        butler : `~lsst.daf.butler.Butler`, optional
            Data Butler instance; if not defined then a new instance is made
            using command line options.
        """
        # make butler instance
        if butler is None:
            butler = _ButlerFactory.makeWriteButler(args)

        # Enable lsstDebug debugging. Note that this is done once in the
        # main process before PreExecInit and it is also repeated before
        # running each task in SingleQuantumExecutor (which may not be
        # needed if `multiprocessing` always uses fork start method).
        if args.enableLsstDebug:
            try:
                _LOG.debug("Will try to import debug.py")
                import debug  # noqa:F401
            except ImportError:
                _LOG.warning("No 'debug' module found.")

        preExecInit = PreExecInit(butler, taskFactory, args.skip_existing)
        preExecInit.initialize(graph,
                               saveInitOutputs=not args.skip_init_writes,
                               registerDatasetTypes=args.register_dataset_types,
                               saveVersions=not args.no_versions)

        if not args.init_only:
            graphFixup = self._importGraphFixup(args)
            quantumExecutor = SingleQuantumExecutor(taskFactory,
                                                    skipExisting=args.skip_existing,
                                                    clobberPartialOutputs=args.clobber_partial_outputs,
                                                    enableLsstDebug=args.enableLsstDebug)
            timeout = self.MP_TIMEOUT if args.timeout is None else args.timeout
            executor = MPGraphExecutor(numProc=args.processes, timeout=timeout,
                                       startMethod=args.start_method,
                                       quantumExecutor=quantumExecutor,
                                       failFast=args.fail_fast,
                                       executionGraphFixup=graphFixup)
            with util.profile(args.profile, _LOG):
                executor.execute(graph, butler)

    def showInfo(self, args, pipeline, graph=None):
        """Display useful info about pipeline and environment.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command line
        pipeline : `Pipeline`
            Pipeline definition
        graph : `QuantumGraph`, optional
            Execution graph
        """
        showOpts = args.show
        for what in showOpts:
            showCommand, _, showArgs = what.partition("=")

            if showCommand in ["pipeline", "config", "history", "tasks"]:
                if not pipeline:
                    _LOG.warning("Pipeline is required for --show=%s", showCommand)
                    continue

            if showCommand in ["graph", "workflow", "uri"]:
                if not graph:
                    _LOG.warning("QuantumGraph is required for --show=%s", showCommand)
                    continue

            if showCommand == "pipeline":
                print(pipeline)
            elif showCommand == "config":
                self._showConfig(pipeline, showArgs, False)
            elif showCommand == "dump-config":
                self._showConfig(pipeline, showArgs, True)
            elif showCommand == "history":
                self._showConfigHistory(pipeline, showArgs)
            elif showCommand == "tasks":
                self._showTaskHierarchy(pipeline)
            elif showCommand == "graph":
                if graph:
                    self._showGraph(graph)
            elif showCommand == "uri":
                if graph:
                    self._showUri(graph, args)
            elif showCommand == "workflow":
                if graph:
                    self._showWorkflow(graph, args)
            else:
                print("Unknown value for show: %s (choose from '%s')" %
                      (what, "', '".join("pipeline config[=XXX] dump-config[=Task] history=XXX "
                                         "tasks graph workflow uri".split())),
                      file=sys.stderr)
                sys.exit(1)

    def _showConfig(self, pipeline, showArgs, dumpFullConfig):
        """Show task configuration

        Parameters
        ----------
        pipeline : `Pipeline`
            Pipeline definition
        showArgs : `str`
            Defines what to show
        dumpFullConfig : `bool`
            If `True` then dump the complete task configuration with all
            imports.
        """
        stream = sys.stdout
        if dumpFullConfig:
            # Task label can be given with this option
            taskName = showArgs
        else:
            # The argument can have form [TaskLabel::][pattern:NOIGNORECASE]
            matConfig = re.search(r"^(?:(\w+)::)?(?:config[.])?(.+)?", showArgs)
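            # Illustrative matches (hypothetical labels): "calibrate::doWrite"
            # gives taskName="calibrate", pattern="doWrite";
            # "config.doWrite" gives taskName=None, pattern="doWrite".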
            taskName = matConfig.group(1)
            pattern = matConfig.group(2)
            if pattern:
                stream = _FilteredStream(pattern)

        tasks = util.filterTasks(pipeline, taskName)
        if not tasks:
            print("Pipeline has no tasks named {}".format(taskName), file=sys.stderr)
            sys.exit(1)

        for taskDef in tasks:
            print("### Configuration for task `{}'".format(taskDef.label))
            taskDef.config.saveToStream(stream, root="config", skipImports=not dumpFullConfig)

    def _showConfigHistory(self, pipeline, showArgs):
        """Show history for task configuration

        Parameters
        ----------
        pipeline : `Pipeline`
            Pipeline definition
        showArgs : `str`
            Defines what to show
        """
        taskName = None
        pattern = None
        matHistory = re.search(r"^(?:(\w+)::)?(?:config[.])?(.+)", showArgs)
        if matHistory:
            taskName = matHistory.group(1)
            pattern = matHistory.group(2)
        if not pattern:
            print("Please provide a value with --show history (e.g. history=Task::param)", file=sys.stderr)
            sys.exit(1)

        tasks = util.filterTasks(pipeline, taskName)
        if not tasks:
            print(f"Pipeline has no tasks named {taskName}", file=sys.stderr)
            sys.exit(1)

        found = False
        for taskDef in tasks:
            config = taskDef.config

            # Look for any matches in the config hierarchy for this name
            for nmatch, thisName in enumerate(fnmatch.filter(config.names(), pattern)):
                if nmatch > 0:
                    print("")

                cpath, _, cname = thisName.rpartition(".")
                try:
                    if not cpath:
                        # looking for a top-level field
                        hconfig = taskDef.config
                    else:
                        hconfig = eval("config." + cpath, {}, {"config": config})
                except AttributeError:
                    print(f"Error: Unable to extract attribute {cpath} from task {taskDef.label}",
                          file=sys.stderr)
                    hconfig = None

                # Sometimes we end up with a non-Config so skip those
                if isinstance(hconfig, (pexConfig.Config, pexConfig.ConfigurableInstance)) and \
                        hasattr(hconfig, cname):
                    print(f"### Configuration field for task `{taskDef.label}'")
                    print(pexConfig.history.format(hconfig, cname))
                    found = True

        if not found:
            print(f"None of the tasks has a field matching {pattern}", file=sys.stderr)
            sys.exit(1)

    def _showTaskHierarchy(self, pipeline):
        """Print task hierarchy to stdout

        Parameters
        ----------
        pipeline : `Pipeline`
            Pipeline definition
        """
        for taskDef in pipeline.toExpandedPipeline():
            print("### Subtasks for task `{}'".format(taskDef.taskName))

            for configName, taskName in util.subTaskIter(taskDef.config):
                print("{}: {}".format(configName, taskName))

    def _showGraph(self, graph):
        """Print quanta information to stdout

        Parameters
        ----------
        graph : `QuantumGraph`
            Execution graph.
        """
        for taskNode in graph.taskGraph:
            print(taskNode)

            for iq, quantum in enumerate(graph.getQuantaForTask(taskNode)):
                print("  Quantum {}:".format(iq))
                print("    inputs:")
                for key, refs in quantum.inputs.items():
                    dataIds = ["DataId({})".format(ref.dataId) for ref in refs]
                    print("      {}: [{}]".format(key, ", ".join(dataIds)))
                print("    outputs:")
                for key, refs in quantum.outputs.items():
                    dataIds = ["DataId({})".format(ref.dataId) for ref in refs]
                    print("      {}: [{}]".format(key, ", ".join(dataIds)))

    def _showWorkflow(self, graph, args):
        """Print quanta information and dependencies to stdout

        Parameters
        ----------
        graph : `QuantumGraph`
            Execution graph.
        args : `argparse.Namespace`
            Parsed command line
        """
        for node in graph:
            print(f"Quantum {node.nodeId.number}: {node.taskDef.taskName}")
            for parent in graph.determineInputsToQuantumNode(node):
                print(f"Parent Quantum {parent.nodeId.number} - Child Quantum {node.nodeId.number}")

    def _showUri(self, graph, args):
        """Print input and predicted output URIs to stdout

        Parameters
        ----------
        graph : `QuantumGraph`
            Execution graph
        args : `argparse.Namespace`
            Parsed command line
        """
        def dumpURIs(thisRef):
            primary, components = butler.getURIs(thisRef, predict=True, run="TBD")
            if primary:
                print(f"    {primary}")
            else:
                print("    (disassembled artifact)")
                for compName, compUri in components.items():
                    print(f"        {compName}: {compUri}")

        butler = _ButlerFactory.makeReadButler(args)
        for node in graph:
            print(f"Quantum {node.nodeId.number}: {node.taskDef.taskName}")
            print("  inputs:")
            for key, refs in node.quantum.inputs.items():
                for ref in refs:
                    dumpURIs(ref)
            print("  outputs:")
            for key, refs in node.quantum.outputs.items():
                for ref in refs:
                    dumpURIs(ref)

    def _importGraphFixup(self, args):
        """Import/instantiate graph fixup object.

        Parameters
        ----------
        args : `argparse.Namespace`
            Parsed command line.

        Returns
        -------
        fixup : `ExecutionGraphFixup` or `None`

        Raises
        ------
        ValueError
            Raised if import fails, method call raises exception, or returned
            instance has unexpected type.
        """
        if args.graph_fixup:
            try:
                factory = doImport(args.graph_fixup)
            except Exception as exc:
                raise ValueError("Failed to import graph fixup class/method") from exc
            try:
                fixup = factory()
            except Exception as exc:
                raise ValueError("Failed to make instance of graph fixup") from exc
            if not isinstance(fixup, ExecutionGraphFixup):
                raise ValueError("Graph fixup is not an instance of ExecutionGraphFixup class")
            return fixup