22 __all__ = [
"ArgumentParser",
"ConfigFileAction",
"ConfigValueAction",
"DataIdContainer",
23 "DatasetArgument",
"ConfigDatasetType",
"InputOnlyArgumentParser"]
43 DEFAULT_INPUT_NAME =
"PIPE_INPUT_ROOT" 44 DEFAULT_CALIB_NAME =
"PIPE_CALIB_ROOT" 45 DEFAULT_OUTPUT_NAME =
"PIPE_OUTPUT_ROOT" 48 def _fixPath(defName, path):
49 """Apply environment variable as default root, if present, and abspath. 54 Name of environment variable containing default root path; 55 if the environment variable does not exist 56 then the path is relative to the current working directory 58 Path relative to default root path. 63 Path that has been expanded, or `None` if the environment variable 64 does not exist and path is `None`. 66 defRoot = os.environ.get(defName)
70 return os.path.abspath(path)
71 return os.path.abspath(os.path.join(defRoot, path
or ""))
75 """Container for data IDs and associated data references. 80 The lowest hierarchy level to descend to for this dataset type, 81 for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`. 82 Use `""` to use the mapper's default for the dataset type. 83 This class does not support `None`, but if it did, `None` 84 would mean the level should not be restricted. 88 Override this class for data IDs that require special handling to be 89 converted to ``data references``, and specify the override class 90 as ``ContainerClass`` for ``add_id_argument``. 92 If you don't want the argument parser to compute data references, 93 specify ``doMakeDataRefList=False`` in ``add_id_argument``. 98 """Dataset type of the data references (`str`). 101 """See parameter ``level`` (`str`). 104 """List of data IDs specified on the command line for the 105 appropriate data ID argument (`list` of `dict`). 108 """List of data references for the data IDs in ``idList`` 109 (`list` of `lsst.daf.persistence.ButlerDataRef`). 110 Elements will be omitted if the corresponding data is not found. 111 The list will be empty when returned by ``parse_args`` if 112 ``doMakeDataRefList=False`` was specified in ``add_id_argument``. 116 """Set actual dataset type, once it is known. 125 The reason ``datasetType`` is not a constructor argument is that 126 some subclasses do not know the dataset type until the command 127 is parsed. Thus, to reduce special cases in the code, 128 ``datasetType`` is always set after the command is parsed. 133 """Validate data IDs and cast them to the correct type 134 (modify idList in place). 136 This code casts the values in the data IDs dicts in `dataIdList` 137 to the type required by the butler. Data IDs are read from the 138 command line as `str`, but the butler requires some values to be 139 other types. For example "visit" values should be `int`. 143 butler : `lsst.daf.persistence.Butler` 147 raise RuntimeError(
"Must call setDatasetType first")
149 idKeyTypeDict = butler.getKeys(datasetType=self.
datasetType, level=self.
level)
150 except KeyError
as e:
151 msg =
"Cannot get keys for datasetType %s at level %s" % (self.
datasetType, self.
level)
152 raise KeyError(msg)
from e
154 for dataDict
in self.
idList:
155 for key, strVal
in dataDict.items():
157 keyType = idKeyTypeDict[key]
162 log = lsstLog.Log.getDefaultLogger()
163 log.warn(
"Unexpected ID %s; guessing type is \"%s\"" %
164 (key,
'str' if keyType == str
else keyType))
165 idKeyTypeDict[key] = keyType
169 castVal = keyType(strVal)
171 raise TypeError(
"Cannot cast value %r to %s for ID key %r" % (strVal, keyType, key,))
172 dataDict[key] = castVal
175 """Compute refList based on idList. 179 namespace : `argparse.Namespace` 180 Results of parsing command-line. The ``butler`` and ``log`` 181 elements must be set. 185 Not called if ``add_id_argument`` was called with 186 ``doMakeDataRefList=False``. 189 raise RuntimeError(
"Must call setDatasetType first")
190 butler = namespace.butler
191 for dataId
in self.
idList:
192 refList = dafPersist.searchDataRefs(butler, datasetType=self.
datasetType,
193 level=self.
level, dataId=dataId)
195 namespace.log.warn(
"No data found for dataId=%s", dataId)
201 """data ID argument, used by `ArgumentParser.add_id_argument`. 206 Name of identifier (argument name without dashes). 208 Type of dataset; specify a string for a fixed dataset type 209 or a `DatasetArgument` for a dynamic dataset type (e.g. 210 one specified by a command-line argument). 212 The lowest hierarchy level to descend to for this dataset type, 213 for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`. 214 Use `""` to use the mapper's default for the dataset type. 215 Some container classes may also support `None`, which means 216 the level should not be restricted; however the default class, 217 `DataIdContainer`, does not support `None`. 218 doMakeDataRefList : `bool`, optional 219 If `True` (default), construct data references. 220 ContainerClass : `class`, optional 221 Class to contain data IDs and data references; the default class 222 `DataIdContainer` will work for many, but not all, cases. 223 For example if the dataset type is specified on the command line 224 then use `DynamicDatasetType`. 227 def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer):
228 if name.startswith(
"-"):
229 raise RuntimeError(
"Name %s must not start with -" % (name,))
239 """`True` if the dataset type is dynamic (that is, specified 240 on the command line). 242 return isinstance(self.
datasetType, DynamicDatasetType)
245 """Get the dataset type as a string. 264 """Abstract base class for a dataset type determined from parsed 265 command-line arguments. 269 """Add a command-line argument to specify dataset type name, 274 parser : `ArgumentParser` 275 Argument parser to add the argument to. 277 Name of data ID argument, without the leading ``"--"``, 282 The default implementation does nothing 288 """Get the dataset type as a string, based on parsed command-line 296 raise NotImplementedError(
"Subclasses must override")
300 """Dataset type specified by a command-line argument. 304 name : `str`, optional 305 Name of command-line argument (including leading "--", 306 if appropriate) whose value is the dataset type. 307 If `None`, uses ``--idName_dstype`` where idName 308 is the name of the data ID argument (e.g. "id"). 309 help : `str`, optional 310 Help string for the command-line argument. 311 default : `object`, optional 312 Default value. If `None`, then the command-line option is required. 313 This argument isignored if the command-line argument is positional 314 (name does not start with "-") because positional arguments do 315 not support default values. 320 help="dataset type to process from input data repository",
323 DynamicDatasetType.__init__(self)
329 """Get the dataset type as a string, from the appropriate 330 command-line argument. 342 argName = self.
name.lstrip(
"-")
343 return getattr(namespace, argName)
346 """Add a command-line argument to specify the dataset type name. 350 parser : `ArgumentParser` 357 Also sets the `name` attribute if it is currently `None`. 359 help = self.
help if self.
help else "dataset type for %s" % (idName,)
360 if self.
name is None:
361 self.
name =
"--%s_dstype" % (idName,)
362 requiredDict = dict()
363 if self.
name.startswith(
"-"):
364 requiredDict = dict(required=self.
default is None)
373 """Dataset type specified by a config parameter. 378 Name of config option whose value is the dataset type. 382 DynamicDatasetType.__init__(self)
386 """Return the dataset type as a string, from the appropriate 391 namespace : `argparse.Namespace` 396 keyList = self.
name.split(
".")
397 value = namespace.config
400 value = getattr(value, key)
402 raise RuntimeError(
"Cannot find config parameter %r" % (self.
name,))
407 """Argument parser for command-line tasks that is based on 408 `argparse.ArgumentParser`. 413 Name of top-level task; used to identify camera-specific override 415 usage : `str`, optional 416 Command-line usage signature. 418 Additional keyword arguments for `argparse.ArgumentParser`. 422 Users may wish to add additional arguments before calling `parse_args`. 431 """Require an output directory to be specified (`bool`).""" 433 def __init__(self, name, usage="%(prog)s input [options]
", **kwargs): 436 argparse.ArgumentParser.__init__(self,
438 fromfile_prefix_chars=
'@',
439 epilog=textwrap.dedent(
"""Notes: 440 * --config, --configfile, --id, --loglevel and @file may appear multiple times; 441 all values are used, in order left to right 442 * @file reads command-line options from the specified file: 443 * data may be distributed among multiple lines (e.g. one option per line) 444 * data after # is treated as a comment and ignored 445 * blank lines and lines starting with # are ignored 446 * To specify multiple values for an option, do not use = after the option name: 447 * right: --configfile foo bar 448 * wrong: --configfile=foo bar 450 formatter_class=argparse.RawDescriptionHelpFormatter,
452 self.add_argument(metavar=
'input', dest=
"rawInput",
453 help=
"path to input data repository, relative to $%s" % (DEFAULT_INPUT_NAME,))
454 self.add_argument(
"--calib", dest=
"rawCalib",
455 help=
"path to input calibration repository, relative to $%s" %
456 (DEFAULT_CALIB_NAME,))
457 self.add_argument(
"--output", dest=
"rawOutput",
458 help=
"path to output data repository (need not exist), relative to $%s" %
459 (DEFAULT_OUTPUT_NAME,))
460 self.add_argument(
"--rerun", dest=
"rawRerun", metavar=
"[INPUT:]OUTPUT",
461 help=
"rerun name: sets OUTPUT to ROOT/rerun/OUTPUT; " 462 "optionally sets ROOT to ROOT/rerun/INPUT")
463 self.add_argument(
"-c",
"--config", nargs=
"*", action=ConfigValueAction,
464 help=
"config override(s), e.g. -c foo=newfoo bar.baz=3", metavar=
"NAME=VALUE")
465 self.add_argument(
"-C",
"--configfile", dest=
"configfile", nargs=
"*", action=ConfigFileAction,
466 help=
"config override file(s)")
467 self.add_argument(
"-L",
"--loglevel", nargs=
"*", action=LogLevelAction,
468 help=
"logging level; supported levels are [trace|debug|info|warn|error|fatal]",
469 metavar=
"LEVEL|COMPONENT=LEVEL")
470 self.add_argument(
"--longlog", action=
"store_true", help=
"use a more verbose format for the logging")
471 self.add_argument(
"--debug", action=
"store_true", help=
"enable debugging output?")
472 self.add_argument(
"--doraise", action=
"store_true",
473 help=
"raise an exception on error (else log a message and continue)?")
474 self.add_argument(
"--noExit", action=
"store_true",
475 help=
"Do not exit even upon failure (i.e. return a struct to the calling script)")
476 self.add_argument(
"--profile", help=
"Dump cProfile statistics to filename")
477 self.add_argument(
"--show", nargs=
"+", default=(),
478 help=
"display the specified information to stdout and quit " 479 "(unless run is specified).")
480 self.add_argument(
"-j",
"--processes", type=int, default=1, help=
"Number of processes to use")
481 self.add_argument(
"-t",
"--timeout", type=float,
482 help=
"Timeout for multiprocessing; maximum wall time (sec)")
483 self.add_argument(
"--clobber-output", action=
"store_true", dest=
"clobberOutput", default=
False,
484 help=(
"remove and re-create the output directory if it already exists " 485 "(safe with -j, but not all other forms of parallel execution)"))
486 self.add_argument(
"--clobber-config", action=
"store_true", dest=
"clobberConfig", default=
False,
487 help=(
"backup and then overwrite existing config files instead of checking them " 488 "(safe with -j, but not all other forms of parallel execution)"))
489 self.add_argument(
"--no-backup-config", action=
"store_true", dest=
"noBackupConfig", default=
False,
490 help=
"Don't copy config to file~N backup.")
491 self.add_argument(
"--clobber-versions", action=
"store_true", dest=
"clobberVersions", default=
False,
492 help=(
"backup and then overwrite existing package versions instead of checking" 493 "them (safe with -j, but not all other forms of parallel execution)"))
494 self.add_argument(
"--no-versions", action=
"store_true", dest=
"noVersions", default=
False,
495 help=
"don't check package versions; useful for development")
496 lsstLog.configure_prop(
""" 497 log4j.rootLogger=INFO, A1 498 log4j.appender.A1=ConsoleAppender 499 log4j.appender.A1.Target=System.out 500 log4j.appender.A1.layout=PatternLayout 501 log4j.appender.A1.layout.ConversionPattern=%c %p: %m%n 504 def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True,
505 ContainerClass=DataIdContainer):
506 """Add a data ID argument. 512 Data ID argument (including leading dashes, if wanted). 513 datasetType : `str` or `DynamicDatasetType`-type 514 Type of dataset. Supply a string for a fixed dataset type. 515 For a dynamically determined dataset type, supply 516 a `DynamicDatasetType`, such a `DatasetArgument`. 518 Help string for the argument. 520 The lowest hierarchy level to descend to for this dataset type, 521 for example `"amp"` for `"raw"` or `"ccd"` for `"calexp"`. 522 Use `""` to use the mapper's default for the dataset type. 523 Some container classes may also support `None`, which means 524 the level should not be restricted; however the default class, 525 `DataIdContainer`, does not support `None`. 526 doMakeDataRefList : bool, optional 527 If `True` (default), construct data references. 528 ContainerClass : `class`, optional 529 Class to contain data IDs and data references; the default class 530 `DataIdContainer` will work for many, but not all, cases. 531 For example if the dataset type is specified on the command line 532 then use `DynamicDatasetType`. 536 If ``datasetType`` is an instance of `DatasetArgument`, 537 then add a second argument to specify the dataset type. 539 The associated data is put into ``namespace.<dataIdArgument.name>`` 540 as an instance of `ContainerClass`; the container includes fields: 542 - ``idList``: a list of data ID dicts. 543 - ``refList``: a list of `~lsst.daf.persistence.Butler` 544 data references (empty if ``doMakeDataRefList`` is `False`). 546 argName = name.lstrip(
"-")
549 raise RuntimeError(
"Data ID argument %s already exists" % (name,))
550 if argName
in set((
"camera",
"config",
"butler",
"log",
"obsPkg")):
551 raise RuntimeError(
"Data ID argument %s is a reserved name" % (name,))
553 self.add_argument(name, nargs=
"*", action=IdValueAction, help=help,
554 metavar=
"KEY=VALUE1[^VALUE2[^VALUE3...]")
558 datasetType=datasetType,
560 doMakeDataRefList=doMakeDataRefList,
561 ContainerClass=ContainerClass,
564 if dataIdArgument.isDynamicDatasetType:
565 datasetType.addArgument(parser=self, idName=argName)
569 def parse_args(self, config, args=None, log=None, override=None):
570 """Parse arguments for a command-line task. 574 config : `lsst.pex.config.Config` 575 Config for the task being run. 576 args : `list`, optional 577 Argument list; if `None` then ``sys.argv[1:]`` is used. 578 log : `lsst.log.Log`, optional 579 `~lsst.log.Log` instance; if `None` use the default log. 580 override : callable, optional 581 A config override function. It must take the root config object 582 as its only argument and must modify the config in place. 583 This function is called after camera-specific overrides files 584 are applied, and before command-line config overrides 585 are applied (thus allowing the user the final word). 589 namespace : `argparse.Namespace` 590 A `~argparse.Namespace` instance containing fields: 592 - ``camera``: camera name. 593 - ``config``: the supplied config with all overrides applied, 594 validated and frozen. 595 - ``butler``: a `lsst.daf.persistence.Butler` for the data. 596 - An entry for each of the data ID arguments registered by 597 `add_id_argument`, of the type passed to its ``ContainerClass`` 598 keyword (`~lsst.pipe.base.DataIdContainer` by default). It 599 includes public elements ``idList`` and ``refList``. 600 - ``log``: a `lsst.log` Log. 601 - An entry for each command-line argument, 602 with the following exceptions: 604 - config is the supplied config, suitably updated. 605 - configfile, id and loglevel are all missing. 606 - ``obsPkg``: name of the ``obs_`` package for this camera. 611 if len(args) < 1
or args[0].startswith(
"-")
or args[0].startswith(
"@"):
613 if len(args) == 1
and args[0]
in (
"-h",
"--help"):
616 self.exit(
"%s: error: Must specify input as first argument" % self.prog)
620 namespace = argparse.Namespace()
621 namespace.input = _fixPath(DEFAULT_INPUT_NAME, args[0])
622 if not os.path.isdir(namespace.input):
623 self.error(
"Error: input=%r not found" % (namespace.input,))
625 namespace.config = config
626 namespace.log = log
if log
is not None else lsstLog.Log.getDefaultLogger()
627 mapperClass = dafPersist.Butler.getMapperClass(namespace.input)
628 namespace.camera = mapperClass.getCameraName()
629 namespace.obsPkg = mapperClass.getPackageName()
634 if override
is not None:
635 override(namespace.config)
639 setattr(namespace, dataIdArgument.name, dataIdArgument.ContainerClass(level=dataIdArgument.level))
641 namespace = argparse.ArgumentParser.parse_args(self, args=args, namespace=namespace)
642 del namespace.configfile
646 if namespace.clobberOutput:
647 if namespace.output
is None:
648 self.error(
"--clobber-output is only valid with --output or --rerun")
649 elif namespace.output == namespace.input:
650 self.error(
"--clobber-output is not valid when the output and input repos are the same")
651 if os.path.exists(namespace.output):
652 namespace.log.info(
"Removing output repo %s for --clobber-output", namespace.output)
653 shutil.rmtree(namespace.output)
655 namespace.log.debug(
"input=%s", namespace.input)
656 namespace.log.debug(
"calib=%s", namespace.calib)
657 namespace.log.debug(
"output=%s", namespace.output)
662 if self.
requireOutput and namespace.output
is None and namespace.rerun
is None:
663 self.error(
"no output directory specified.\n" 664 "An output directory must be specified with the --output or --rerun\n" 665 "command-line arguments.\n")
669 butlerArgs = {
'mapperArgs': {
'calibRoot': namespace.calib}}
671 outputs = {
'root': namespace.output,
'mode':
'rw'}
672 inputs = {
'root': namespace.input}
673 inputs.update(butlerArgs)
674 outputs.update(butlerArgs)
675 namespace.butler = dafPersist.Butler(inputs=inputs, outputs=outputs)
677 outputs = {
'root': namespace.input,
'mode':
'rw'}
678 outputs.update(butlerArgs)
679 namespace.butler = dafPersist.Butler(outputs=outputs)
686 if "data" in namespace.show:
688 for dataRef
in getattr(namespace, dataIdName).refList:
689 print(
"%s dataRef.dataId = %s" % (dataIdName, dataRef.dataId))
691 if namespace.show
and "run" not in namespace.show:
699 sys.stderr.write(
"Warning: no 'debug' module found\n")
700 namespace.debug =
False 702 del namespace.loglevel
704 if namespace.longlog:
705 lsstLog.configure_prop(
""" 706 log4j.rootLogger=INFO, A1 707 log4j.appender.A1=ConsoleAppender 708 log4j.appender.A1.Target=System.out 709 log4j.appender.A1.layout=PatternLayout 710 log4j.appender.A1.layout.ConversionPattern=%-5p %d{yyyy-MM-ddThh:mm:ss.sss} %c (%X{LABEL})(%F:%L)- %m%n 712 del namespace.longlog
714 namespace.config.validate()
715 namespace.config.freeze()
719 def _parseDirectories(self, namespace):
720 """Parse input, output and calib directories 722 This allows for hacking the directories, e.g., to include a 724 Modifications are made to the 'namespace' object in-place. 726 mapperClass = dafPersist.Butler.getMapperClass(_fixPath(DEFAULT_INPUT_NAME, namespace.rawInput))
727 namespace.calib = _fixPath(DEFAULT_CALIB_NAME, namespace.rawCalib)
730 if namespace.rawOutput:
731 namespace.output = _fixPath(DEFAULT_OUTPUT_NAME, namespace.rawOutput)
733 namespace.output =
None 741 if namespace.rawRerun:
743 self.error(
"Error: cannot specify both --output and --rerun")
744 namespace.rerun = namespace.rawRerun.split(
":")
745 rerunDir = [os.path.join(namespace.input,
"rerun", dd)
for dd
in namespace.rerun]
746 modifiedInput =
False 747 if len(rerunDir) == 2:
748 namespace.input, namespace.output = rerunDir
750 elif len(rerunDir) == 1:
751 namespace.output = rerunDir[0]
752 if os.path.exists(os.path.join(namespace.output,
"_parent")):
753 namespace.input = os.path.realpath(os.path.join(namespace.output,
"_parent"))
756 self.error(
"Error: invalid argument for --rerun: %s" % namespace.rerun)
757 if modifiedInput
and dafPersist.Butler.getMapperClass(namespace.input) != mapperClass:
758 self.error(
"Error: input directory specified by --rerun must have the same mapper as INPUT")
760 namespace.rerun =
None 761 del namespace.rawInput
762 del namespace.rawCalib
763 del namespace.rawOutput
764 del namespace.rawRerun
766 def _processDataIds(self, namespace):
767 """Process the parsed data for each data ID argument in an 768 `~argparse.Namespace`. 772 - Validate data ID keys. 773 - Cast the data ID values to the correct type. 774 - Compute data references from data IDs. 778 namespace : `argparse.Namespace` 779 Parsed namespace. These attributes are read: 783 - ``config``, if any dynamic dataset types are set by 785 - Dataset type arguments (e.g. ``id_dstype``), if any dynamic 786 dataset types are specified by such 788 These attributes are modified: 790 - ``<name>`` for each data ID argument registered using 791 `add_id_argument` with name ``<name>``. 794 dataIdContainer = getattr(namespace, dataIdArgument.name)
795 dataIdContainer.setDatasetType(dataIdArgument.getDatasetType(namespace))
796 if dataIdArgument.doMakeDataRefList:
798 dataIdContainer.castDataIds(butler=namespace.butler)
799 except (KeyError, TypeError)
as e:
805 dataIdContainer.makeDataRefList(namespace)
807 def _applyInitialOverrides(self, namespace):
808 """Apply obs-package-specific and camera-specific config 809 override files, if found 813 namespace : `argparse.Namespace` 814 Parsed namespace. These attributes are read: 818 Look in the package namespace.obsPkg for files: 820 - ``config/<task_name>.py`` 821 - ``config/<camera_name>/<task_name>.py`` and load if found. 824 fileName = self.
_name +
".py" 826 os.path.join(obsPkgDir,
"config", fileName),
827 os.path.join(obsPkgDir,
"config", namespace.camera, fileName),
829 if os.path.exists(filePath):
830 namespace.log.info(
"Loading config overrride file %r", filePath)
831 namespace.config.load(filePath)
833 namespace.log.debug(
"Config override file does not exist: %r", filePath)
836 """Perform camera-specific operations before parsing the command-line. 840 namespace : `argparse.Namespace` 841 Namespace (an ) with the following fields: 843 - ``camera``: the camera name. 844 - ``config``: the config passed to parse_args, with no overrides applied. 845 - ``obsPkg``: the ``obs_`` package for this camera. 846 - ``log``: a `lsst.log` Log. 850 The default implementation does nothing. 855 """Allow files of arguments referenced by ``@<path>`` to contain 856 multiple values on each line. 861 Line of text read from an argument file. 863 arg_line = arg_line.strip()
864 if not arg_line
or arg_line.startswith(
"#"):
866 for arg
in shlex.split(arg_line, comments=
True, posix=
True):
872 """Add a "--reuse-outputs-from SUBTASK" option to the argument 875 CmdLineTasks that can be restarted at an intermediate step using 876 outputs from earlier (but still internal) steps should use this 877 method to allow the user to control whether that happens when 878 outputs from earlier steps are present. 883 A sequence of string names (by convention, top-level subtasks) 884 that identify the steps that could be skipped when their 885 outputs are already present. The list is ordered, so when the 886 user specifies one step on the command line, all previous steps 887 may be skipped as well. In addition to the choices provided, 888 users may pass "all" to indicate that all steps may be thus 891 When this method is called, the ``namespace`` object returned by 892 ``parse_args`` will contain a ``reuse`` attribute containing 893 a list of all steps that should be skipped if their outputs 895 If no steps should be skipped, the ``reuse`` will be an empty list. 897 choices = list(choices)
898 choices.append(
"all")
899 self.add_argument(
"--reuse-outputs-from", dest=
"reuse", choices=choices,
900 default=[], action=ReuseAction,
901 help=(
"Skip the given subtask and its predecessors and reuse their outputs " 902 "if those outputs already exist. Use 'all' to specify all subtasks."))
906 """`ArgumentParser` for command-line tasks that don't write any output. 909 requireOutput =
False 913 """Get a dictionary of task info for all subtasks in a config 917 config : `lsst.pex.config.Config` 918 Configuration to process. 919 taskDict : `dict`, optional 920 Users should not specify this argument. Supports recursion. 921 If provided, taskDict is updated in place, else a new `dict` 923 baseName : `str`, optional 924 Users should not specify this argument. It is only used for 925 recursion: if a non-empty string then a period is appended 926 and the result is used as a prefix for additional entries 927 in taskDict; otherwise no prefix is used. 932 Keys are config field names, values are task names. 936 This function is designed to be called recursively. 937 The user should call with only a config (leaving taskDict and baseName 938 at their default values). 942 for fieldName, field
in config.items():
943 if hasattr(field,
"value")
and hasattr(field,
"target"):
944 subConfig = field.value
945 if isinstance(subConfig, pexConfig.Config):
946 subBaseName =
"%s.%s" % (baseName, fieldName)
if baseName
else fieldName
948 taskName =
"%s.%s" % (field.target.__module__, field.target.__name__)
950 taskName = repr(field.target)
951 taskDict[subBaseName] = taskName
952 getTaskDict(config=subConfig, taskDict=taskDict, baseName=subBaseName)
957 """Process arguments specified with ``--show`` (but ignores 962 showOpts : `list` of `str` 963 List of options passed to ``--show``. 966 exit : bool, optional 967 Exit if ``"run"`` isn't included in ``showOpts``. 971 Supports the following options in showOpts: 973 - ``config[=PAT]``. Dump all the config entries, or just the ones that 974 match the glob pattern. 975 - ``history=PAT``. Show where the config entries that match the glob 977 - ``tasks``. Show task hierarchy. 978 - ``data``. Ignored; to be processed by caller. 979 - ``run``. Keep going (the default behaviour is to exit if 980 ``--show`` is specified). 982 Calls ``sys.exit(1)`` if any other option found. 987 for what
in showOpts:
988 showCommand, showArgs = what.split(
"=", 1)
if "=" in what
else (what,
"")
990 if showCommand ==
"config":
991 matConfig = re.search(
r"^(?:config.)?(.+)?", showArgs)
992 pattern = matConfig.group(1)
994 class FilteredStream:
995 """A file object that only prints lines 996 that match the glob "pattern". 998 N.b. Newlines are silently discarded and reinserted; 1002 def __init__(self, pattern):
1004 mat = re.search(
r"(.*):NOIGNORECASE$", pattern)
1007 pattern = mat.group(1)
1008 self._pattern = re.compile(fnmatch.translate(pattern))
1010 if pattern != pattern.lower():
1011 print(
u"Matching \"%s\" without regard to case " 1012 "(append :NOIGNORECASE to prevent this)" % (pattern,), file=sys.stdout)
1013 self._pattern = re.compile(fnmatch.translate(pattern), re.IGNORECASE)
1015 def write(self, showStr):
1016 showStr = showStr.rstrip()
1019 matchStr = showStr.split(
"\n")[-1].split(
"=")[0]
1020 if self._pattern.search(matchStr):
1021 print(
u"\n" + showStr)
1023 fd = FilteredStream(pattern)
1027 config.saveToStream(fd,
"config")
1028 elif showCommand ==
"history":
1029 matHistory = re.search(
r"^(?:config.)?(.+)?", showArgs)
1030 globPattern = matHistory.group(1)
1032 print(
"Please provide a value with --show history (e.g. history=*.doXXX)", file=sys.stderr)
1036 for i, pattern
in enumerate(fnmatch.filter(config.names(), globPattern)):
1040 pattern = pattern.split(
".")
1041 cpath, cname = pattern[:-1], pattern[-1]
1043 for i, cpt
in enumerate(cpath):
1045 hconfig = getattr(hconfig, cpt)
1046 except AttributeError:
1047 print(
"Error: configuration %s has no subconfig %s" %
1048 (
".".join([
"config"] + cpath[:i]), cpt), file=sys.stderr)
1052 print(pexConfig.history.format(hconfig, cname))
1054 print(
"Error: %s has no field %s" % (
".".join([
"config"] + cpath), cname),
1061 elif showCommand ==
"data":
1063 elif showCommand ==
"run":
1065 elif showCommand ==
"tasks":
1068 print(
u"Unknown value for show: %s (choose from '%s')" %
1069 (what,
"', '".join(
"config[=XXX] data history=XXX tasks run".split())), file=sys.stderr)
1072 if exit
and "run" not in showOpts:
1077 """Print task hierarchy to stdout. 1081 config : `lsst.pex.config.Config` 1082 Configuration to process. 1087 fieldNameList = sorted(taskDict.keys())
1088 for fieldName
in fieldNameList:
1089 taskName = taskDict[fieldName]
1090 print(
u"%s: %s" % (fieldName, taskName))
1094 """argparse action callback to override config parameters using 1095 name=value pairs from the command-line. 1098 def __call__(self, parser, namespace, values, option_string):
1099 """Override one or more config name value pairs. 1103 parser : `argparse.ArgumentParser` 1105 namespace : `argparse.Namespace` 1106 Parsed command. The ``namespace.config`` attribute is updated. 1108 A list of ``configItemName=value`` pairs. 1109 option_string : `str` 1110 Option value specified by the user. 1112 if namespace.config
is None:
1114 for nameValue
in values:
1115 name, sep, valueStr = nameValue.partition(
"=")
1117 parser.error(
"%s value %s must be in form name=value" % (option_string, nameValue))
1122 except AttributeError:
1123 parser.error(
"no config field: %s" % (name,))
1126 value = eval(valueStr, {})
1128 parser.error(
"cannot parse %r as a value for %s" % (valueStr, name))
1131 except Exception
as e:
1132 parser.error(
"cannot set config.%s=%r: %s" % (name, value, e))
1136 """argparse action to load config overrides from one or more files. 1139 def __call__(self, parser, namespace, values, option_string=None):
1140 """Load one or more files of config overrides. 1144 parser : `argparse.ArgumentParser` 1146 namespace : `argparse.Namespace` 1147 Parsed command. The following attributes are updated by this 1148 method: ``namespace.config``. 1150 A list of data config file paths. 1151 option_string : `str`, optional 1152 Option value specified by the user. 1154 if namespace.config
is None:
1156 for configfile
in values:
1158 namespace.config.load(configfile)
1159 except Exception
as e:
1160 parser.error(
"cannot load config file %r: %s" % (configfile, e))
1164 """argparse action callback to process a data ID into a dict. 1167 def __call__(self, parser, namespace, values, option_string):
1168 """Parse ``--id`` data and append results to 1169 ``namespace.<argument>.idList``. 1173 parser : `ArgumentParser` 1175 namespace : `argparse.Namespace` 1176 Parsed command (an instance of argparse.Namespace). 1177 The following attributes are updated: 1179 - ``<idName>.idList``, where ``<idName>`` is the name of the 1180 ID argument, for instance ``"id"`` for ID argument ``--id``. 1182 A list of data IDs; see Notes below. 1183 option_string : `str` 1184 Option value specified by the user. 1188 The data format is:: 1190 key1=value1_1[^value1_2[^value1_3...] 1191 key2=value2_1[^value2_2[^value2_3...]... 1193 The values (e.g. ``value1_1``) may either be a string, 1194 or of the form ``"int..int"`` (e.g. ``"1..3"``) which is 1195 interpreted as ``"1^2^3"`` (inclusive, unlike a python range). 1196 So ``"0^2..4^7..9"`` is equivalent to ``"0^2^3^4^7^8^9"``. 1197 You may also specify a stride: ``"1..5:2"`` is ``"1^3^5"``. 1199 The cross product is computed for keys with multiple values. 1202 --id visit 1^2 ccd 1,1^2,2 1204 results in the following data ID dicts being appended to 1205 ``namespace.<argument>.idList``: 1207 {"visit":1, "ccd":"1,1"} 1208 {"visit":2, "ccd":"1,1"} 1209 {"visit":1, "ccd":"2,2"} 1210 {"visit":2, "ccd":"2,2"} 1212 if namespace.config
is None:
1214 idDict = collections.OrderedDict()
1215 for nameValue
in values:
1216 name, sep, valueStr = nameValue.partition(
"=")
1218 parser.error(
"%s appears multiple times in one ID argument: %s" % (name, option_string))
1220 for v
in valueStr.split(
"^"):
1221 mat = re.search(
r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
1223 v1 = int(mat.group(1))
1224 v2 = int(mat.group(2))
1226 v3 = int(v3)
if v3
else 1
1227 for v
in range(v1, v2 + 1, v3):
1228 idDict[name].append(str(v))
1230 idDict[name].append(v)
1232 iterList = [idDict[key]
for key
in idDict.keys()]
1233 idDictList = [collections.OrderedDict(zip(idDict.keys(), valList))
1234 for valList
in itertools.product(*iterList)]
1236 argName = option_string.lstrip(
"-")
1237 ident = getattr(namespace, argName)
1238 ident.idList += idDictList
1242 """argparse action to set log level. 1245 def __call__(self, parser, namespace, values, option_string):
1250 parser : `ArgumentParser` 1252 namespace : `argparse.Namespace` 1253 Parsed command. This argument is not used. 1255 List of trace levels; each item must be of the form 1256 ``component_name=level`` or ``level``, where ``level`` 1257 is a keyword (not case sensitive) or an integer. 1258 option_string : `str` 1259 Option value specified by the user. 1261 permittedLevelList = (
'TRACE',
'DEBUG',
'INFO',
'WARN',
'ERROR',
'FATAL')
1262 permittedLevelSet = set(permittedLevelList)
1263 for componentLevel
in values:
1264 component, sep, levelStr = componentLevel.partition(
"=")
1266 levelStr, component = component,
None 1267 logLevelUpr = levelStr.upper()
1268 if logLevelUpr
in permittedLevelSet:
1269 logLevel = getattr(lsstLog.Log, logLevelUpr)
1271 parser.error(
"loglevel=%r not one of %s" % (levelStr, permittedLevelList))
1272 if component
is None:
1273 namespace.log.setLevel(logLevel)
1275 lsstLog.Log.getLogger(component).setLevel(logLevel)
1279 """argparse action associated with ArgumentPraser.addReuseOption.""" 1281 def __call__(self, parser, namespace, value, option_string):
1283 value = self.choices[-2]
1284 index = self.choices.index(value)
1285 namespace.reuse = self.choices[:index + 1]
def setDottedAttr(item, name, value):
    """Set an instance attribute (like `setattr` but accepting
    hierarchical names such as ``foo.bar.baz``).

    Parameters
    ----------
    item : obj
        Object whose attribute is to be set.
    name : `str`
        Name of attribute to set.
    value : obj
        New value for the attribute.

    Notes
    -----
    For example if name is ``foo.bar.baz`` then ``item.foo.bar.baz``
    is set to the specified value.
    """
    subitem = item
    subnameList = name.split(".")
    # Walk down to the object that owns the final attribute, then set it.
    for subname in subnameList[:-1]:
        subitem = getattr(subitem, subname)
    setattr(subitem, subnameList[-1], value)
def getDottedAttr(item, name):
    """Get an attribute (like `getattr` but accepts hierarchical names
    such as ``foo.bar.baz``).

    Parameters
    ----------
    item : obj
        Object whose attribute is to be returned.
    name : `str`
        Name of the attribute to get.

    Returns
    -------
    itemAttr : obj
        If name is ``foo.bar.baz`` then the return value is
        ``item.foo.bar.baz``.
    """
    subitem = item
    # Follow each dotted component in turn.
    for subname in name.split("."):
        subitem = getattr(subitem, subname)
    return subitem
def addArgument(self, parser, idName)
def getTaskDict(config, taskDict=None, baseName="")
def __init__(self, level=None)
def getDottedAttr(item, name)
def __init__(self, name=None, help="dataset type to process from input data repository", default=None)
def getDatasetType(self, namespace)
def addArgument(self, parser, idName)
def makeDataRefList(self, namespace)
def setDottedAttr(item, name, value)
def __call__(self, parser, namespace, values, option_string=None)
def getDatasetType(self, namespace)
def obeyShowArgument(showOpts, config=None, exit=False)
def addReuseOption(self, choices)
def getDatasetType(self, namespace)
std::string getPackageDir(std::string const &packageName)
def _processDataIds(self, namespace)
def getDatasetType(self, namespace)
def convert_arg_line_to_args(self, arg_line)
def isDynamicDatasetType(self)
def handleCamera(self, namespace)
def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer)
def __call__(self, parser, namespace, value, option_string)
def _applyInitialOverrides(self, namespace)
def castDataIds(self, butler)
def __init__(self, name, usage="%(prog)s input [options]", kwargs)
def __call__(self, parser, namespace, values, option_string)
def setDatasetType(self, datasetType)
def showTaskHierarchy(config)
def __call__(self, parser, namespace, values, option_string)
def __call__(self, parser, namespace, values, option_string)
def parse_args(self, config, args=None, log=None, override=None)
def _parseDirectories(self, namespace)
def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True, ContainerClass=DataIdContainer)