__all__ = ["ArgumentParser", "ConfigFileAction", "ConfigValueAction", "DataIdContainer",
           "DatasetArgument", "ConfigDatasetType", "InputOnlyArgumentParser"]

DEFAULT_INPUT_NAME = "PIPE_INPUT_ROOT"
DEFAULT_CALIB_NAME = "PIPE_CALIB_ROOT"
DEFAULT_OUTPUT_NAME = "PIPE_OUTPUT_ROOT"

def _fixPath(defName, path):
    """Apply environment variable as default root, if present, and abspath.

    Parameters
    ----------
    defName : `str`
        Name of environment variable containing default root path;
        if the environment variable does not exist
        then the path is relative to the current working directory.
    path : `str`
        Path relative to default root path.

    Returns
    -------
    abspath : `str`
        Path that has been expanded, or `None` if the environment variable
        does not exist and ``path`` is `None`.
    """
    defRoot = os.environ.get(defName)
    if defRoot is None:
        if path is None:
            return None
        return os.path.abspath(path)
    return os.path.abspath(os.path.join(defRoot, path or ""))
76 """Container for data IDs and associated data references.
81 The lowest hierarchy level to descend to for this dataset type,
82 for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`.
83 Use `""` to use the mapper's default for the dataset type.
84 This class does not support `None`, but if it did, `None`
85 would mean the level should not be restricted.
89 Override this class for data IDs that require special handling to be
90 converted to ``data references``, and specify the override class
91 as ``ContainerClass`` for ``add_id_argument``.
93 If you don't want the argument parser to compute data references,
94 specify ``doMakeDataRefList=False`` in ``add_id_argument``.
99 """Dataset type of the data references (`str`).
102 """See parameter ``level`` (`str`).
105 """List of data IDs specified on the command line for the
106 appropriate data ID argument (`list` of `dict`).
109 """List of data references for the data IDs in ``idList``
110 (`list` of `lsst.daf.persistence.ButlerDataRef`).
111 Elements will be omitted if the corresponding data is not found.
112 The list will be empty when returned by ``parse_args`` if
113 ``doMakeDataRefList=False`` was specified in ``add_id_argument``.
117 """Set actual dataset type, once it is known.
126 The reason ``datasetType`` is not a constructor argument is that
127 some subclasses do not know the dataset type until the command
128 is parsed. Thus, to reduce special cases in the code,
129 ``datasetType`` is always set after the command is parsed.
134 """Validate data IDs and cast them to the correct type
135 (modify idList in place).
137 This code casts the values in the data IDs dicts in `dataIdList`
138 to the type required by the butler. Data IDs are read from the
139 command line as `str`, but the butler requires some values to be
140 other types. For example "visit" values should be `int`.
144 butler : `lsst.daf.persistence.Butler`
148 raise RuntimeError(
"Must call setDatasetType first")
150 idKeyTypeDict = butler.getKeys(datasetType=self.
datasetTypedatasetType, level=self.
levellevel)
151 except KeyError
as e:
152 msg = f
"Cannot get keys for datasetType {self.datasetType} at level {self.level}"
153 raise KeyError(msg)
from e
155 for dataDict
in self.
idListidList:
156 for key, strVal
in dataDict.items():
158 keyType = idKeyTypeDict[key]
164 log = lsstLog.Log.getDefaultLogger()
165 log.warn(
"Unexpected ID %s; guessing type is \"%s\"",
166 key,
'str' if keyType == str
else keyType)
167 idKeyTypeDict[key] = keyType
171 castVal = keyType(strVal)
173 raise TypeError(f
"Cannot cast value {strVal!r} to {keyType} for ID key {key}")
174 dataDict[key] = castVal
177 """Compute refList based on idList.
181 namespace : `argparse.Namespace`
182 Results of parsing command-line. The ``butler`` and ``log``
183 elements must be set.
187 Not called if ``add_id_argument`` was called with
188 ``doMakeDataRefList=False``.
191 raise RuntimeError(
"Must call setDatasetType first")
192 butler = namespace.butler
193 for dataId
in self.
idListidList:
194 refList = dafPersist.searchDataRefs(butler, datasetType=self.
datasetTypedatasetType,
195 level=self.
levellevel, dataId=dataId)
197 namespace.log.warn(
"No data found for dataId=%s", dataId)
203 """data ID argument, used by `ArgumentParser.add_id_argument`.
208 Name of identifier (argument name without dashes).
210 Type of dataset; specify a string for a fixed dataset type
211 or a `DatasetArgument` for a dynamic dataset type (e.g.
212 one specified by a command-line argument).
214 The lowest hierarchy level to descend to for this dataset type,
215 for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`.
216 Use `""` to use the mapper's default for the dataset type.
217 Some container classes may also support `None`, which means
218 the level should not be restricted; however the default class,
219 `DataIdContainer`, does not support `None`.
220 doMakeDataRefList : `bool`, optional
221 If `True` (default), construct data references.
222 ContainerClass : `class`, optional
223 Class to contain data IDs and data references; the default class
224 `DataIdContainer` will work for many, but not all, cases.
225 For example if the dataset type is specified on the command line
226 then use `DynamicDatasetType`.
229 def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer):
230 if name.startswith(
"-"):
231 raise RuntimeError(f
"Name {name} must not start with -")
241 """`True` if the dataset type is dynamic (that is, specified
242 on the command line).
244 return isinstance(self.
datasetTypedatasetType, DynamicDatasetType)
247 """Get the dataset type as a string.
266 """Abstract base class for a dataset type determined from parsed
267 command-line arguments.
271 """Add a command-line argument to specify dataset type name,
276 parser : `ArgumentParser`
277 Argument parser to add the argument to.
279 Name of data ID argument, without the leading ``"--"``,
284 The default implementation does nothing
290 """Get the dataset type as a string, based on parsed command-line
298 raise NotImplementedError(
"Subclasses must override")
302 """Dataset type specified by a command-line argument.
306 name : `str`, optional
307 Name of command-line argument (including leading "--",
308 if appropriate) whose value is the dataset type.
309 If `None`, uses ``--idName_dstype`` where idName
310 is the name of the data ID argument (e.g. "id").
311 help : `str`, optional
312 Help string for the command-line argument.
313 default : `object`, optional
314 Default value. If `None`, then the command-line option is required.
315 This argument isignored if the command-line argument is positional
316 (name does not start with "-") because positional arguments do
317 not support default values.
322 help="dataset type to process from input data repository",
325 DynamicDatasetType.__init__(self)
331 """Get the dataset type as a string, from the appropriate
332 command-line argument.
344 argName = self.
namename.lstrip(
"-")
345 return getattr(namespace, argName)
348 """Add a command-line argument to specify the dataset type name.
352 parser : `ArgumentParser`
359 Also sets the `name` attribute if it is currently `None`.
361 help = self.
helphelp
if self.
helphelp
else f
"dataset type for {idName}"
362 if self.
namename
is None:
363 self.
namename = f
"--{idName}_dstype"
364 requiredDict = dict()
365 if self.
namename.startswith(
"-"):
366 requiredDict = dict(required=self.
defaultdefault
is None)
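
# Illustrative sketch, not part of the original module: letting the user choose the dataset
# type on the command line. The task name "exampleTask" is made up.
def _exampleDatasetArgument():
    parser = ArgumentParser(name="exampleTask")
    # also adds --id_dstype, because DatasetArgument is given no explicit argument name
    parser.add_id_argument("--id", DatasetArgument(help="dataset type to process"),
                           help="data ID, e.g. --id visit=12345 ccd=1,2")
    return parser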
375 """Dataset type specified by a config parameter.
380 Name of config option whose value is the dataset type.
384 DynamicDatasetType.__init__(self)
388 """Return the dataset type as a string, from the appropriate
393 namespace : `argparse.Namespace`
398 keyList = self.
namename.split(
".")
399 value = namespace.config
402 value = getattr(value, key)
404 raise RuntimeError(f
"Cannot find config parameter {self.name!r}")
409 """Argument parser for command-line tasks that is based on
410 `argparse.ArgumentParser`.
415 Name of top-level task; used to identify camera-specific override
417 usage : `str`, optional
418 Command-line usage signature.
420 Additional keyword arguments for `argparse.ArgumentParser`.
424 Users may wish to add additional arguments before calling `parse_args`.
433 """Require an output directory to be specified (`bool`)."""
435 def __init__(self, name, usage="%(prog)s input [options]
", **kwargs):
436 self._name_name = name
438 argparse.ArgumentParser.__init__(self,
440 fromfile_prefix_chars=
'@',
441 epilog=textwrap.dedent(
"""Notes:
442 * --config, --config-file or --configfile, --id, --loglevel and @file may appear multiple times;
443 all values are used, in order left to right
444 * @file reads command-line options from the specified file:
445 * data may be distributed among multiple lines (e.g. one option per line)
446 * data after # is treated as a comment and ignored
447 * blank lines and lines starting with # are ignored
448 * To specify multiple values for an option, do not use = after the option name:
449 * right: --config-file foo bar
450 * wrong: --config-file=foo bar
452 formatter_class=argparse.RawDescriptionHelpFormatter,
454 self.add_argument(metavar=
'input', dest=
"rawInput",
455 help=f
"path to input data repository, relative to ${DEFAULT_INPUT_NAME}")
456 self.add_argument(
"--calib", dest=
"rawCalib",
457 help=f
"path to input calibration repository, relative to ${DEFAULT_CALIB_NAME}")
458 self.add_argument(
"--output", dest=
"rawOutput",
459 help=
"path to output data repository (need not exist), "
460 f
"relative to ${DEFAULT_OUTPUT_NAME}")
461 self.add_argument(
"--rerun", dest=
"rawRerun", metavar=
"[INPUT:]OUTPUT",
462 help=
"rerun name: sets OUTPUT to ROOT/rerun/OUTPUT; "
463 "optionally sets ROOT to ROOT/rerun/INPUT")
464 self.add_argument(
"-c",
"--config", nargs=
"*", action=ConfigValueAction,
465 help=
"config override(s), e.g. -c foo=newfoo bar.baz=3", metavar=
"NAME=VALUE")
466 self.add_argument(
"-C",
"--config-file",
"--configfile",
467 dest=
"configfile", nargs=
"*", action=ConfigFileAction,
468 help=
"config override file(s)")
469 self.add_argument(
"-L",
"--loglevel", nargs=
"*", action=LogLevelAction,
470 help=
"logging level; supported levels are [trace|debug|info|warn|error|fatal]",
471 metavar=
"LEVEL|COMPONENT=LEVEL")
472 self.add_argument(
"--longlog", action=LongLogAction, help=
"use a more verbose format for the logging")
473 self.add_argument(
"--debug", action=
"store_true", help=
"enable debugging output?")
474 self.add_argument(
"--doraise", action=
"store_true",
475 help=
"raise an exception on error (else log a message and continue)?")
476 self.add_argument(
"--noExit", action=
"store_true",
477 help=
"Do not exit even upon failure (i.e. return a struct to the calling script)")
478 self.add_argument(
"--profile", help=
"Dump cProfile statistics to filename")
479 self.add_argument(
"--show", nargs=
"+", default=(),
480 help=
"display the specified information to stdout and quit "
481 "(unless run is specified); information is "
482 "(config[=PATTERN]|history=PATTERN|tasks|data|run)")
483 self.add_argument(
"-j",
"--processes", type=int, default=1, help=
"Number of processes to use")
484 self.add_argument(
"-t",
"--timeout", type=float,
485 help=
"Timeout for multiprocessing; maximum wall time (sec)")
486 self.add_argument(
"--clobber-output", action=
"store_true", dest=
"clobberOutput", default=
False,
487 help=(
"remove and re-create the output directory if it already exists "
488 "(safe with -j, but not all other forms of parallel execution)"))
489 self.add_argument(
"--clobber-config", action=
"store_true", dest=
"clobberConfig", default=
False,
490 help=(
"backup and then overwrite existing config files instead of checking them "
491 "(safe with -j, but not all other forms of parallel execution)"))
492 self.add_argument(
"--no-backup-config", action=
"store_true", dest=
"noBackupConfig", default=
False,
493 help=
"Don't copy config to file~N backup.")
494 self.add_argument(
"--clobber-versions", action=
"store_true", dest=
"clobberVersions", default=
False,
495 help=(
"backup and then overwrite existing package versions instead of checking"
496 "them (safe with -j, but not all other forms of parallel execution)"))
497 self.add_argument(
"--no-versions", action=
"store_true", dest=
"noVersions", default=
False,
498 help=
"don't check package versions; useful for development")
499 lsstLog.configure_prop(
"""
500 log4j.rootLogger=INFO, A1
501 log4j.appender.A1=ConsoleAppender
502 log4j.appender.A1.Target=System.out
503 log4j.appender.A1.layout=PatternLayout
504 log4j.appender.A1.layout.ConversionPattern=%c %p: %m%n
508 lgr = logging.getLogger()
509 lgr.setLevel(logging.INFO)
510 lgr.addHandler(lsstLog.LogHandler())

    def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True,
                        ContainerClass=DataIdContainer):
        """Add a data ID argument.

        Parameters
        ----------
        name : `str`
            Data ID argument (including leading dashes, if wanted).
        datasetType : `str` or `DynamicDatasetType`-type
            Type of dataset. Supply a string for a fixed dataset type.
            For a dynamically determined dataset type, supply
            a `DynamicDatasetType`, such as `DatasetArgument`.
        help : `str`
            Help string for the argument.
        level : `str`
            The lowest hierarchy level to descend to for this dataset type,
            for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`.
            Use `""` to use the mapper's default for the dataset type.
            Some container classes may also support `None`, which means
            the level should not be restricted; however the default class,
            `DataIdContainer`, does not support `None`.
        doMakeDataRefList : `bool`, optional
            If `True` (default), construct data references.
        ContainerClass : `class`, optional
            Class to contain data IDs and data references; the default class
            `DataIdContainer` will work for many, but not all, cases.
            For example if the dataset type is specified on the command line
            then use `DynamicDatasetType`.

        Notes
        -----
        If ``datasetType`` is an instance of `DatasetArgument`,
        then add a second argument to specify the dataset type.

        The associated data is put into ``namespace.<dataIdArgument.name>``
        as an instance of `ContainerClass`; the container includes fields:

        - ``idList``: a list of data ID dicts.
        - ``refList``: a list of `~lsst.daf.persistence.Butler`
          data references (empty if ``doMakeDataRefList`` is `False`).
        """
        argName = name.lstrip("-")

        if argName in self._dataIdArgDict:
            raise RuntimeError(f"Data ID argument {name} already exists")
        if argName in set(("camera", "config", "butler", "log", "obsPkg")):
            raise RuntimeError(f"Data ID argument {name} is a reserved name")

        self.add_argument(name, nargs="*", action=IdValueAction, help=help,
                          metavar="KEY=VALUE1[^VALUE2[^VALUE3...]")

        dataIdArgument = DataIdArgument(
            name=argName,
            datasetType=datasetType,
            level=level,
            doMakeDataRefList=doMakeDataRefList,
            ContainerClass=ContainerClass,
        )

        if dataIdArgument.isDynamicDatasetType:
            datasetType.addArgument(parser=self, idName=argName)
        self._dataIdArgDict[argName] = dataIdArgument
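
    # Illustrative usage sketch (not part of the original class): a task parser with a
    # fixed dataset type; the task name and data ID keys below are made-up examples.
    #
    #     parser = ArgumentParser(name="exampleTask")
    #     parser.add_id_argument("--id", datasetType="raw",
    #                            help="data ID, e.g. --id visit=12345 ccd=1,2")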

    def parse_args(self, config, args=None, log=None, override=None):
        """Parse arguments for a command-line task.

        Parameters
        ----------
        config : `lsst.pex.config.Config`
            Config for the task being run.
        args : `list`, optional
            Argument list; if `None` then ``sys.argv[1:]`` is used.
        log : `lsst.log.Log`, optional
            `~lsst.log.Log` instance; if `None` use the default log.
        override : callable, optional
            A config override function. It must take the root config object
            as its only argument and must modify the config in place.
            This function is called after camera-specific override files
            are applied, and before command-line config overrides
            are applied (thus allowing the user the final word).

        Returns
        -------
        namespace : `argparse.Namespace`
            A `~argparse.Namespace` instance containing fields:

            - ``camera``: camera name.
            - ``config``: the supplied config with all overrides applied,
              validated and frozen.
            - ``butler``: a `lsst.daf.persistence.Butler` for the data.
            - An entry for each of the data ID arguments registered by
              `add_id_argument`, of the type passed to its ``ContainerClass``
              keyword (`~lsst.pipe.base.DataIdContainer` by default). It
              includes public elements ``idList`` and ``refList``.
            - ``log``: a `lsst.log` Log.
            - An entry for each command-line argument,
              with the following exceptions:

              - config is the supplied config, suitably updated.
              - configfile, id and loglevel are all missing.
            - ``obsPkg``: name of the ``obs_`` package for this camera.
        """
        if args is None:
            args = sys.argv[1:]

        if len(args) < 1 or args[0].startswith("-") or args[0].startswith("@"):
            self.print_help()
            if len(args) == 1 and args[0] in ("-h", "--help"):
                self.exit()
            else:
                self.exit(f"{self.prog}: error: Must specify input as first argument")

        namespace = argparse.Namespace()
        namespace.input = _fixPath(DEFAULT_INPUT_NAME, args[0])
        if not os.path.isdir(namespace.input):
            self.error(f"Error: input={namespace.input!r} not found")

        namespace.config = config
        namespace.log = log if log is not None else lsstLog.Log.getDefaultLogger()
        mapperClass = dafPersist.Butler.getMapperClass(namespace.input)
        if mapperClass is None:
            self.error(f"Error: no mapper specified for input repo {namespace.input!r}")

        namespace.camera = mapperClass.getCameraName()
        namespace.obsPkg = mapperClass.getPackageName()

        self.handleCamera(namespace)
        self._applyInitialOverrides(namespace)
        if override is not None:
            override(namespace.config)

        # add data ID containers to namespace
        for dataIdArgument in self._dataIdArgDict.values():
            setattr(namespace, dataIdArgument.name, dataIdArgument.ContainerClass(level=dataIdArgument.level))

        namespace = argparse.ArgumentParser.parse_args(self, args=args, namespace=namespace)
        del namespace.configfile

        self._parseDirectories(namespace)

        if namespace.clobberOutput:
            if namespace.output is None:
                self.error("--clobber-output is only valid with --output or --rerun")
            elif namespace.output == namespace.input:
                self.error("--clobber-output is not valid when the output and input repos are the same")
            if os.path.exists(namespace.output):
                namespace.log.info("Removing output repo %s for --clobber-output", namespace.output)
                shutil.rmtree(namespace.output)

        namespace.log.debug("input=%s", namespace.input)
        namespace.log.debug("calib=%s", namespace.calib)
        namespace.log.debug("output=%s", namespace.output)

        if self.requireOutput and namespace.output is None and namespace.rerun is None:
            self.error("no output directory specified.\n"
                       "An output directory must be specified with the --output or --rerun\n"
                       "command-line arguments.\n")

        butlerArgs = {'mapperArgs': {'calibRoot': namespace.calib}}
        if namespace.output:
            outputs = {'root': namespace.output, 'mode': 'rw'}
            inputs = {'root': namespace.input}
            inputs.update(butlerArgs)
            outputs.update(butlerArgs)
            namespace.butler = dafPersist.Butler(inputs=inputs, outputs=outputs)
        else:
            outputs = {'root': namespace.input, 'mode': 'rw'}
            outputs.update(butlerArgs)
            namespace.butler = dafPersist.Butler(outputs=outputs)

        # convert the data in each of the identifier lists to proper types;
        # this is done after constructing the butler (hence after parsing the
        # command line) because it takes a long time to construct a butler
        self._processDataIds(namespace)
        if "data" in namespace.show:
            for dataIdName in self._dataIdArgDict.keys():
                for dataRef in getattr(namespace, dataIdName).refList:
                    print(f"{dataIdName} dataRef.dataId = {dataRef.dataId}")

        if namespace.show and "run" not in namespace.show:
            sys.exit(0)

        if namespace.debug:
            try:
                import debug  # noqa: F401
            except ImportError:
                print("Warning: no 'debug' module found", file=sys.stderr)
                namespace.debug = False

        del namespace.loglevel
        del namespace.longlog

        namespace.config.validate()
        namespace.config.freeze()

        return namespace
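
    # Illustrative usage sketch (not part of the original class): a driver script for a
    # hypothetical ExampleTask would typically do something like
    #
    #     config = ExampleTask.ConfigClass()
    #     parser = ArgumentParser(name="exampleTask")
    #     parser.add_id_argument("--id", datasetType="raw", help="data ID")
    #     namespace = parser.parse_args(config=config)
    #     for dataRef in namespace.id.refList:
    #         ...  # process each data reference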

    def _parseDirectories(self, namespace):
        """Parse input, output and calib directories.

        This allows for hacking the directories, e.g., to include a
        "rerun". Modifications are made to the 'namespace' object in-place.
        """
        mapperClass = dafPersist.Butler.getMapperClass(_fixPath(DEFAULT_INPUT_NAME, namespace.rawInput))
        namespace.calib = _fixPath(DEFAULT_CALIB_NAME, namespace.rawCalib)

        if namespace.rawOutput:
            namespace.output = _fixPath(DEFAULT_OUTPUT_NAME, namespace.rawOutput)
        else:
            namespace.output = None

        if namespace.rawRerun:
            if namespace.output:
                self.error("Error: cannot specify both --output and --rerun")
            namespace.rerun = namespace.rawRerun.split(":")
            rerunDir = [os.path.join(namespace.input, "rerun", dd) for dd in namespace.rerun]
            modifiedInput = False
            if len(rerunDir) == 2:
                namespace.input, namespace.output = rerunDir
                modifiedInput = True
            elif len(rerunDir) == 1:
                namespace.output = rerunDir[0]
                if os.path.exists(os.path.join(namespace.output, "_parent")):
                    namespace.input = os.path.realpath(os.path.join(namespace.output, "_parent"))
                    modifiedInput = True
            else:
                self.error(f"Error: invalid argument for --rerun: {namespace.rerun}")
            if modifiedInput and dafPersist.Butler.getMapperClass(namespace.input) != mapperClass:
                self.error("Error: input directory specified by --rerun must have the same mapper as INPUT")
        else:
            namespace.rerun = None
        del namespace.rawInput
        del namespace.rawCalib
        del namespace.rawOutput
        del namespace.rawRerun

    def _processDataIds(self, namespace):
        """Process the parsed data for each data ID argument in an
        `~argparse.Namespace`.

        Processing includes:

        - Validate data ID keys.
        - Cast the data ID values to the correct type.
        - Compute data references from data IDs.

        Parameters
        ----------
        namespace : `argparse.Namespace`
            Parsed namespace. These attributes are read:

            - ``butler``
            - ``log``
            - ``config``, if any dynamic dataset types are set by
              a config parameter.
            - Dataset type arguments (e.g. ``id_dstype``), if any dynamic
              dataset types are specified by such arguments.

            These attributes are modified:

            - ``<name>`` for each data ID argument registered using
              `add_id_argument` with name ``<name>``.
        """
        for dataIdArgument in self._dataIdArgDict.values():
            dataIdContainer = getattr(namespace, dataIdArgument.name)
            dataIdContainer.setDatasetType(dataIdArgument.getDatasetType(namespace))
            if dataIdArgument.doMakeDataRefList:
                try:
                    dataIdContainer.castDataIds(butler=namespace.butler)
                except (KeyError, TypeError) as e:
                    # failure of castDataIds indicates invalid command-line arguments
                    self.error(e)
                dataIdContainer.makeDataRefList(namespace)

    def _applyInitialOverrides(self, namespace):
        """Apply obs-package-specific and camera-specific config
        override files, if found.

        Parameters
        ----------
        namespace : `argparse.Namespace`
            Parsed namespace. These attributes are read:

            - ``obsPkg``

            Look in the package namespace.obsPkg for files:

            - ``config/<task_name>.py``
            - ``config/<camera_name>/<task_name>.py`` and load if found.
        """
        obsPkgDir = lsst.utils.getPackageDir(namespace.obsPkg)
        fileName = self._name + ".py"
        for filePath in (
            os.path.join(obsPkgDir, "config", fileName),
            os.path.join(obsPkgDir, "config", namespace.camera, fileName),
        ):
            if os.path.exists(filePath):
                namespace.log.info("Loading config override file %r", filePath)
                namespace.config.load(filePath)
            else:
                namespace.log.debug("Config override file does not exist: %r", filePath)
839 """Perform camera-specific operations before parsing the command-line.
843 namespace : `argparse.Namespace`
844 Namespace (an ) with the following fields:
846 - ``camera``: the camera name.
847 - ``config``: the config passed to parse_args, with no overrides
849 - ``obsPkg``: the ``obs_`` package for this camera.
850 - ``log``: a `lsst.log` Log.
854 The default implementation does nothing.
859 """Allow files of arguments referenced by ``@<path>`` to contain
860 multiple values on each line.
865 Line of text read from an argument file.
867 arg_line = arg_line.strip()
868 if not arg_line
or arg_line.startswith(
"#"):
870 for arg
in shlex.split(arg_line, comments=
True, posix=
True):
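
    # Illustrative sketch (not part of the original class): an @-file passed on the command
    # line, e.g. ``exampleTask.py repo @myargs.txt``, may contain lines such as
    #
    #     --output /path/to/out    # trailing comments are stripped
    #     --id visit=12345 ccd=1,1
    #     # blank lines and lines starting with # are ignored
    #
    # each of which this method splits into individual arguments.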
876 """Add a "--reuse-outputs-from SUBTASK" option to the argument
879 CmdLineTasks that can be restarted at an intermediate step using
880 outputs from earlier (but still internal) steps should use this
881 method to allow the user to control whether that happens when
882 outputs from earlier steps are present.
887 A sequence of string names (by convention, top-level subtasks)
888 that identify the steps that could be skipped when their
889 outputs are already present. The list is ordered, so when the
890 user specifies one step on the command line, all previous steps
891 may be skipped as well. In addition to the choices provided,
892 users may pass "all" to indicate that all steps may be thus
895 When this method is called, the ``namespace`` object returned by
896 ``parse_args`` will contain a ``reuse`` attribute containing
897 a list of all steps that should be skipped if their outputs
899 If no steps should be skipped, the ``reuse`` will be an empty list.
901 choices = list(choices)
902 choices.append(
"all")
903 self.add_argument(
"--reuse-outputs-from", dest=
"reuse", choices=choices,
904 default=[], action=ReuseAction,
905 help=(
"Skip the given subtask and its predecessors and reuse their outputs "
906 "if those outputs already exist. Use 'all' to specify all subtasks."))
910 """`ArgumentParser` for command-line tasks that don't write any output.
913 requireOutput =
False
917 """Get a dictionary of task info for all subtasks in a config
921 config : `lsst.pex.config.Config`
922 Configuration to process.
923 taskDict : `dict`, optional
924 Users should not specify this argument. Supports recursion.
925 If provided, taskDict is updated in place, else a new `dict`
927 baseName : `str`, optional
928 Users should not specify this argument. It is only used for
929 recursion: if a non-empty string then a period is appended
930 and the result is used as a prefix for additional entries
931 in taskDict; otherwise no prefix is used.
936 Keys are config field names, values are task names.
940 This function is designed to be called recursively.
941 The user should call with only a config (leaving taskDict and baseName
942 at their default values).
946 for fieldName, field
in config.items():
947 if hasattr(field,
"value")
and hasattr(field,
"target"):
948 subConfig = field.value
949 if isinstance(subConfig, pexConfig.Config):
950 subBaseName = f
"{baseName}.{fieldName}" if baseName
else fieldName
952 taskName = f
"{field.target.__module__}.{field.target.__name__}"
954 taskName = repr(field.target)
955 taskDict[subBaseName] = taskName
956 getTaskDict(config=subConfig, taskDict=taskDict, baseName=subBaseName)
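
# Illustrative sketch, not part of the original module: inspecting the subtasks configured
# in a (hypothetical) task config; the field and task names in the comment are made up.
def _exampleGetTaskDict(config):
    taskDict = getTaskDict(config)
    # e.g. {"isr": "lsst.ip.isr.isrTask.IsrTask", "charImage": "...CharacterizeImageTask"}
    return taskDict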
961 """Process arguments specified with ``--show`` (but ignores
966 showOpts : `list` of `str`
967 List of options passed to ``--show``.
970 exit : bool, optional
971 Exit if ``"run"`` isn't included in ``showOpts``.
975 Supports the following options in showOpts:
977 - ``config[=PAT]``. Dump all the config entries, or just the ones that
978 match the glob pattern.
979 - ``history=PAT``. Show where the config entries that match the glob
981 - ``tasks``. Show task hierarchy.
982 - ``data``. Ignored; to be processed by caller.
983 - ``run``. Keep going (the default behaviour is to exit if
984 ``--show`` is specified).
986 Calls ``sys.exit(1)`` if any other option found.
    for what in showOpts:
        showCommand, showArgs = what.split("=", 1) if "=" in what else (what, "")

        if showCommand == "config":
            matConfig = re.search(r"^(?:config.)?(.+)?", showArgs)
            pattern = matConfig.group(1)
            if pattern:
                class FilteredStream:
                    """A file object that only prints lines
                    that match the glob "pattern".

                    N.b. Newlines are silently discarded and reinserted;
                    crude but effective.
                    """

                    def __init__(self, pattern):
                        # obey case if pattern isn't lowercase or requests NOIGNORECASE
                        mat = re.search(r"(.*):NOIGNORECASE$", pattern)

                        if mat:
                            pattern = mat.group(1)
                            self._pattern = re.compile(fnmatch.translate(pattern))
                        else:
                            if pattern != pattern.lower():
                                print(f"Matching {pattern!r} without regard to case "
                                      "(append :NOIGNORECASE to prevent this)", file=sys.stdout)
                            self._pattern = re.compile(fnmatch.translate(pattern), re.IGNORECASE)

                    def write(self, showStr):
                        showStr = showStr.rstrip()
                        # strip off doc string line(s) and cut off at "=" for string matching
                        matchStr = showStr.split("\n")[-1].split("=")[0]
                        if self._pattern.search(matchStr):
                            print("\n" + showStr)

                fd = FilteredStream(pattern)
            else:
                fd = sys.stdout

            config.saveToStream(fd, "config")
        elif showCommand == "history":
            matHistory = re.search(r"^(?:config.)?(.+)?", showArgs)
            globPattern = matHistory.group(1)
            if not globPattern:
                print("Please provide a value with --show history (e.g. history=*.doXXX)", file=sys.stderr)
                sys.exit(1)

            error = False
            for i, pattern in enumerate(fnmatch.filter(config.names(), globPattern)):
                if i > 0:
                    print("")

                pattern = pattern.split(".")
                cpath, cname = pattern[:-1], pattern[-1]
                hconfig = config  # the config we are interested in
                for i, cpt in enumerate(cpath):
                    try:
                        hconfig = getattr(hconfig, cpt)
                    except AttributeError:
                        config_path = ".".join(["config"] + cpath[:i])
                        print(f"Error: configuration {config_path} has no subconfig {cpt}", file=sys.stderr)
                        error = True

                try:
                    print(pexConfig.history.format(hconfig, cname))
                except KeyError:
                    config_path = ".".join(["config"] + cpath)
                    print(f"Error: {config_path} has no field {cname}", file=sys.stderr)
                    error = True

            if error:
                sys.exit(1)
        elif showCommand == "data":
            pass
        elif showCommand == "run":
            pass
        elif showCommand == "tasks":
            showTaskHierarchy(config)
        else:
            choices = "', '".join("config[=XXX] data history=XXX tasks run".split())
            print(f"Unknown value for show: {what} (choose from {choices!r})", file=sys.stderr)
            sys.exit(1)

    if exit and "run" not in showOpts:
        sys.exit(0)
1082 """Print task hierarchy to stdout.
1086 config : `lsst.pex.config.Config`
1087 Configuration to process.
1092 fieldNameList = sorted(taskDict.keys())
1093 for fieldName
in fieldNameList:
1094 taskName = taskDict[fieldName]
1095 print(f
"{fieldName}: {taskName}")
1099 """argparse action callback to override config parameters using
1100 name=value pairs from the command-line.
1103 def __call__(self, parser, namespace, values, option_string):
1104 """Override one or more config name value pairs.
1108 parser : `argparse.ArgumentParser`
1110 namespace : `argparse.Namespace`
1111 Parsed command. The ``namespace.config`` attribute is updated.
1113 A list of ``configItemName=value`` pairs.
1114 option_string : `str`
1115 Option value specified by the user.
1117 if namespace.config
is None:
1119 for nameValue
in values:
1120 name, sep, valueStr = nameValue.partition(
"=")
1122 parser.error(f
"{option_string} value {nameValue} must be in form name=value")
1127 except AttributeError:
1128 parser.error(f
"no config field: {name}")
1131 value = eval(valueStr, {})
1133 parser.error(f
"cannot parse {valueStr!r} as a value for {name}")
1136 except Exception
as e:
1137 parser.error(f
"cannot set config.{name}={value!r}: {e}")
1141 """argparse action to load config overrides from one or more files.
1144 def __call__(self, parser, namespace, values, option_string=None):
1145 """Load one or more files of config overrides.
1149 parser : `argparse.ArgumentParser`
1151 namespace : `argparse.Namespace`
1152 Parsed command. The following attributes are updated by this
1153 method: ``namespace.config``.
1155 A list of data config file paths.
1156 option_string : `str`, optional
1157 Option value specified by the user.
1159 if namespace.config
is None:
1161 for configfile
in values:
1163 namespace.config.load(configfile)
1164 except Exception
as e:
1165 parser.error(f
"cannot load config file {configfile!r}: {e}")
1169 """argparse action callback to process a data ID into a dict.
1172 def __call__(self, parser, namespace, values, option_string):
1173 """Parse ``--id`` data and append results to
1174 ``namespace.<argument>.idList``.
1178 parser : `ArgumentParser`
1180 namespace : `argparse.Namespace`
1181 Parsed command (an instance of argparse.Namespace).
1182 The following attributes are updated:
1184 - ``<idName>.idList``, where ``<idName>`` is the name of the
1185 ID argument, for instance ``"id"`` for ID argument ``--id``.
1187 A list of data IDs; see Notes below.
1188 option_string : `str`
1189 Option value specified by the user.
1193 The data format is::
1195 key1=value1_1[^value1_2[^value1_3...]
1196 key2=value2_1[^value2_2[^value2_3...]...
1198 The values (e.g. ``value1_1``) may either be a string,
1199 or of the form ``"int..int"`` (e.g. ``"1..3"``) which is
1200 interpreted as ``"1^2^3"`` (inclusive, unlike a python range).
1201 So ``"0^2..4^7..9"`` is equivalent to ``"0^2^3^4^7^8^9"``.
1202 You may also specify a stride: ``"1..5:2"`` is ``"1^3^5"``.
1204 The cross product is computed for keys with multiple values.
1207 --id visit 1^2 ccd 1,1^2,2
1209 results in the following data ID dicts being appended to
1210 ``namespace.<argument>.idList``:
1212 {"visit":1, "ccd":"1,1"}
1213 {"visit":2, "ccd":"1,1"}
1214 {"visit":1, "ccd":"2,2"}
1215 {"visit":2, "ccd":"2,2"}
1217 if namespace.config
is None:
1219 idDict = collections.OrderedDict()
1220 for nameValue
in values:
1221 name, sep, valueStr = nameValue.partition(
"=")
1223 parser.error(f
"{name} appears multiple times in one ID argument: {option_string}")
1225 for v
in valueStr.split(
"^"):
1226 mat = re.search(
r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
1228 v1 = int(mat.group(1))
1229 v2 = int(mat.group(2))
1231 v3 = int(v3)
if v3
else 1
1232 for v
in range(v1, v2 + 1, v3):
1233 idDict[name].append(str(v))
1235 idDict[name].append(v)
1237 iterList = [idDict[key]
for key
in idDict.keys()]
1238 idDictList = [collections.OrderedDict(zip(idDict.keys(), valList))
1239 for valList
in itertools.product(*iterList)]
1241 argName = option_string.lstrip(
"-")
1242 ident = getattr(namespace, argName)
1243 ident.idList += idDictList
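
# Illustrative sketch, not part of the original module: the "int..int[:stride]" range
# shorthand accepted by --id, expanded the same way as in IdValueAction above.
def _expandIdValueRange(valueStr):
    result = []
    for v in valueStr.split("^"):
        mat = re.search(r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
        if mat:
            v1, v2, v3 = int(mat.group(1)), int(mat.group(2)), mat.group(3)
            result += [str(i) for i in range(v1, v2 + 1, int(v3) if v3 else 1)]
        else:
            result.append(v)
    return result

# _expandIdValueRange("0^2..4^7..9") == ["0", "2", "3", "4", "7", "8", "9"]
# _expandIdValueRange("1..5:2") == ["1", "3", "5"]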
1247 """argparse action to make logs verbose.
1249 An action so that it can take effect before log level options.
1252 def __call__(self, parser, namespace, values, option_string):
1257 parser : `ArgumentParser`
1259 namespace : `argparse.Namespace`
1260 Parsed command. This argument is not used.
1263 option_string : `str`
1264 Option value specified by the user (unused).
1266 lsstLog.configure_prop(
"""
1267 log4j.rootLogger=INFO, A1
1268 log4j.appender.A1=ConsoleAppender
1269 log4j.appender.A1.Target=System.out
1270 log4j.appender.A1.layout=PatternLayout
1271 log4j.appender.A1.layout.ConversionPattern=%-5p %d{yyyy-MM-ddTHH:mm:ss.SSSZ} %c (%X{LABEL})(%F:%L)- %m%n
1276 """argparse action to set log level.
1279 def __call__(self, parser, namespace, values, option_string):
1284 parser : `ArgumentParser`
1286 namespace : `argparse.Namespace`
1287 Parsed command. This argument is not used.
1289 List of trace levels; each item must be of the form
1290 ``component_name=level`` or ``level``, where ``level``
1291 is a keyword (not case sensitive) or an integer.
1292 option_string : `str`
1293 Option value specified by the user.
1295 permittedLevelList = (
'TRACE',
'DEBUG',
'INFO',
'WARN',
'ERROR',
'FATAL')
1296 permittedLevelSet = set(permittedLevelList)
1297 for componentLevel
in values:
1298 component, sep, levelStr = componentLevel.partition(
"=")
1300 levelStr, component = component,
None
1301 logLevelUpr = levelStr.upper()
1302 if logLevelUpr
in permittedLevelSet:
1303 logLevel = getattr(lsstLog.Log, logLevelUpr)
1305 parser.error(f
"loglevel={levelStr!r} not one of {permittedLevelList}")
1306 if component
is None:
1307 namespace.log.setLevel(logLevel)
1309 lsstLog.Log.getLogger(component).setLevel(logLevel)
1311 pyLevel = lsstLog.LevelTranslator.lsstLog2logging(logLevel)
1312 logging.getLogger(component).setLevel(pyLevel)
1316 """argparse action associated with ArgumentPraser.addReuseOption."""
1318 def __call__(self, parser, namespace, value, option_string):
1320 value = self.choices[-2]
1321 index = self.choices.index(value)
1322 namespace.reuse = self.choices[:index + 1]
1326 """Set an instance attribute (like `setattr` but accepting
1327 hierarchical names such as ``foo.bar.baz``).
1332 Object whose attribute is to be set.
1334 Name of attribute to set.
1336 New value for the attribute.
1340 For example if name is ``foo.bar.baz`` then ``item.foo.bar.baz``
1341 is set to the specified value.
1344 subnameList = name.split(
".")
1345 for subname
in subnameList[:-1]:
1346 subitem = getattr(subitem, subname)
1347 setattr(subitem, subnameList[-1], value)
1351 """Get an attribute (like `getattr` but accepts hierarchical names
1352 such as ``foo.bar.baz``).
1357 Object whose attribute is to be returned.
1359 Name of the attribute to get.
1364 If name is ``foo.bar.baz then the return value is
1365 ``item.foo.bar.baz``.
1368 for subname
in name.split(
"."):
1369 subitem = getattr(subitem, subname)
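
# Illustrative sketch, not part of the original module: dotted attribute access on a small
# made-up object hierarchy.
def _exampleDottedAttr():
    class _Node:
        pass
    item = _Node()
    item.foo = _Node()
    item.foo.bar = _Node()
    setDottedAttr(item, "foo.bar.baz", 3)
    assert getDottedAttr(item, "foo.bar.baz") == 3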