from __future__ import absolute_import, division, print_function
from builtins import zip
from builtins import str
from builtins import range
from builtins import object

import abc
import argparse
import collections
import fnmatch
import itertools
import os
import re
import shlex
import shutil
import sys
import textwrap

import lsst.utils
import lsst.pex.config as pexConfig
import lsst.pex.config.history
import lsst.log as lsstLog
import lsst.daf.persistence as dafPersist
from future.utils import with_metaclass
__all__ = ["ArgumentParser", "ConfigFileAction", "ConfigValueAction", "DataIdContainer",
           "DatasetArgument", "ConfigDatasetType", "InputOnlyArgumentParser"]

# Environment variables that supply default root paths for the three
# repository arguments; see _fixPath for how they are combined with
# command-line values.
DEFAULT_INPUT_NAME = "PIPE_INPUT_ROOT"
DEFAULT_CALIB_NAME = "PIPE_CALIB_ROOT"
DEFAULT_OUTPUT_NAME = "PIPE_OUTPUT_ROOT"
"PIPE_OUTPUT_ROOT" 55 def _fixPath(defName, path):
56 """!Apply environment variable as default root, if present, and abspath 58 @param[in] defName name of environment variable containing default root path; 59 if the environment variable does not exist then the path is relative 60 to the current working directory 61 @param[in] path path relative to default root path 62 @return abspath: path that has been expanded, or None if the environment variable does not exist 65 defRoot = os.environ.get(defName)
69 return os.path.abspath(path)
70 return os.path.abspath(os.path.join(defRoot, path
or ""))
class DataIdContainer(object):
    """!A container for data IDs and associated data references

    Override for data IDs that require special handling to be converted to data references,
    and specify the override class as ContainerClass for add_id_argument.
    (If you don't want the argument parser to compute data references, you may use this class
    and specify doMakeDataRefList=False in add_id_argument.)
    """

    def __init__(self, level=None):
        """!Construct a DataIdContainer

        @param[in] level  level of dataset, for butler
        """
        self.datasetType = None  # the actual dataset type, set by setDatasetType
        self.level = level
        self.idList = []   # list of data ID dicts, filled by IdValueAction
        self.refList = []  # list of butler data references, filled by makeDataRefList

    def setDatasetType(self, datasetType):
        """!Set actual dataset type, once it is known"""
        self.datasetType = datasetType

    def castDataIds(self, butler):
        """!Validate data IDs and cast them to the correct type (modify idList in place).

        @param[in] butler  data butler (a \\ref lsst.daf.persistence.butler.Butler
            "lsst.daf.persistence.Butler")
        @throw RuntimeError if setDatasetType was not called first
        @throw KeyError if the butler cannot supply keys for the dataset type
        @throw TypeError if a value cannot be cast to the key's type
        """
        if self.datasetType is None:
            raise RuntimeError("Must call setDatasetType first")
        try:
            idKeyTypeDict = butler.getKeys(datasetType=self.datasetType, level=self.level)
        except KeyError:
            raise KeyError("Cannot get keys for datasetType %s at level %s" % (self.datasetType,
                                                                               self.level))

        for dataDict in self.idList:
            for key, strVal in dataDict.items():
                try:
                    keyType = idKeyTypeDict[key]
                except KeyError:
                    # unknown key: fall back to str and warn, then remember the guess
                    # NOTE(review): fallback type reconstructed from the warning text — confirm
                    keyType = str
                    log = lsstLog.Log.getDefaultLogger()
                    log.warn("Unexpected ID %s; guessing type is \"%s\"" %
                             (key, 'str' if keyType == str else keyType))
                    idKeyTypeDict[key] = keyType
                if keyType != str:
                    try:
                        castVal = keyType(strVal)
                    except Exception:
                        raise TypeError("Cannot cast value %r to %s for ID key %r" % (strVal, keyType, key,))
                    dataDict[key] = castVal

    def makeDataRefList(self, namespace):
        """!Compute refList based on idList

        Not called if add_id_argument called with doMakeDataRefList=False

        @param[in] namespace  results of parsing command-line (with 'butler' and 'log' elements)
        @throw RuntimeError if setDatasetType was not called first
        """
        if self.datasetType is None:
            raise RuntimeError("Must call setDatasetType first")
        butler = namespace.butler
        for dataId in self.idList:
            refList = list(butler.subset(datasetType=self.datasetType, level=self.level, dataId=dataId))
            # exclude nonexistent data (a recursive test, e.g. for the sake of "raw" data)
            # NOTE(review): filter step reconstructed from elided lines — confirm against original
            refList = [dr for dr in refList
                       if dataExists(butler=butler, datasetType=self.datasetType, dataRef=dr)]
            if not refList:
                namespace.log.warn("No data found for dataId=%s", dataId)
                continue
            self.refList += refList
class DataIdArgument(object):
    """!Glorified struct for data about id arguments, used by ArgumentParser.add_id_argument"""

    def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer):
        """!Constructor

        @param[in] name  name of identifier (argument name without dashes)
        @param[in] datasetType  type of dataset; specify a string for a fixed dataset type
            or a DatasetArgument for a dynamic dataset type (e.g. one specified by a
            command-line argument)
        @param[in] level  level of dataset, for butler
        @param[in] doMakeDataRefList  construct data references?
        @param[in] ContainerClass  class to contain data IDs and data references;
            the default class will work for many kinds of data, but you may have to override
            to compute some kinds of data references.
        @throw RuntimeError if name starts with "-"
        """
        if name.startswith("-"):
            raise RuntimeError("Name %s must not start with -" % (name,))
        self.name = name
        self.datasetType = datasetType
        self.level = level
        self.doMakeDataRefList = bool(doMakeDataRefList)
        self.ContainerClass = ContainerClass
        self.argName = name.lstrip("-")

    @property
    def isDynamicDatasetType(self):
        """!Is the dataset type dynamic (specified on the command line)?"""
        return isinstance(self.datasetType, DynamicDatasetType)

    def getDatasetType(self, namespace):
        """!Return the dataset type as a string

        @param[in] namespace  parsed command
        """
        if self.isDynamicDatasetType:
            return self.datasetType.getDatasetType(namespace)
        return self.datasetType


class DynamicDatasetType(with_metaclass(abc.ABCMeta, object)):
    """!Abstract base class for a dataset type determined from parsed command-line arguments"""

    def addArgument(self, parser, idName):
        """!Add a command-line argument to specify dataset type name, if wanted

        @param[in] parser  argument parser to which to add argument
        @param[in] idName  name of data ID argument, without the leading "--", e.g. "id"

        The default implementation does nothing
        """
        pass

    @abc.abstractmethod
    def getDatasetType(self, namespace):
        """Return the dataset type as a string, based on parsed command-line arguments

        @param[in] namespace  parsed command
        """
        raise NotImplementedError("Subclasses must override")


class DatasetArgument(DynamicDatasetType):
    """!A dataset type specified by a command-line argument."""

    def __init__(self,
                 name=None,
                 help="dataset type to process from input data repository",
                 default=None):
        """!Construct a DatasetArgument

        @param[in] name  name of command-line argument (including leading "--", if appropriate)
            whose value is the dataset type; if None, uses --idName_dstype
            where idName is the name of the data ID argument (e.g. "id")
        @param[in] help  help string for the command-line argument
        @param[in] default  default value; if None, then the command-line option is required;
            ignored if the argument is positional (name does not start with "-")
            because positional arguments do not support default values
        """
        DynamicDatasetType.__init__(self)
        self.name = name
        self.help = help
        self.default = default

    def getDatasetType(self, namespace):
        """Return the dataset type as a string, from the appropriate command-line argument

        @param[in] namespace  parsed command
        """
        argName = self.name.lstrip("-")
        return getattr(namespace, argName)

    def addArgument(self, parser, idName):
        """!Add a command-line argument to specify dataset type name

        Also set self.name if it is None
        """
        help = self.help if self.help else "dataset type for %s" % (idName,)
        if self.name is None:
            self.name = "--%s_dstype" % (idName,)
        requiredDict = dict()
        # only optional arguments (leading "-") support required/default semantics
        if self.name.startswith("-"):
            requiredDict = dict(required=self.default is None)
        parser.add_argument(
            self.name,
            default=self.default,
            help=help,
            **requiredDict)


class ConfigDatasetType(DynamicDatasetType):
    """!A dataset type specified by a config parameter"""

    def __init__(self, name):
        """!Construct a ConfigDatasetType

        @param[in] name  name of config option whose value is the dataset type
        """
        DynamicDatasetType.__init__(self)
        self.name = name

    def getDatasetType(self, namespace):
        """Return the dataset type as a string, from the appropriate config field

        @param[in] namespace  parsed command
        @throw RuntimeError if the config parameter cannot be found
        """
        # walk dotted name through the config hierarchy
        keyList = self.name.split(".")
        value = namespace.config
        for key in keyList:
            try:
                value = getattr(value, key)
            except KeyError:
                # NOTE(review): exception type reconstructed — confirm against original
                raise RuntimeError("Cannot find config parameter %r" % (self.name,))
        return value
class ArgumentParser(argparse.ArgumentParser):
    """!An argument parser for pipeline tasks that is based on argparse.ArgumentParser

    Users may wish to add additional arguments before calling parse_args.

    @note
    - I would prefer to check data ID keys and values as they are parsed,
      but the required information comes from the butler, so I have to construct a butler
      before I do this checking. Constructing a butler is slow, so I only want do it once,
      after parsing the command line, so as to catch syntax errors quickly.
    """
    # require an output directory to be specified? (see InputOnlyArgumentParser)
    requireOutput = True

    def __init__(self, name, usage="%(prog)s input [options]", **kwargs):
        """!Construct an ArgumentParser

        @param[in] name  name of top-level task; used to identify camera-specific override files
        @param[in] usage  usage string
        @param[in] **kwargs  additional keyword arguments for argparse.ArgumentParser
        """
        self._name = name
        self._dataIdArgDict = {}  # dict of data identifier specifications, by argument name
        argparse.ArgumentParser.__init__(
            self,
            usage=usage,
            fromfile_prefix_chars='@',
            epilog=textwrap.dedent("""Notes:
            * --config, --configfile, --id, --loglevel and @file may appear multiple times;
                all values are used, in order left to right
            * @file reads command-line options from the specified file:
                * data may be distributed among multiple lines (e.g. one option per line)
                * data after # is treated as a comment and ignored
                * blank lines and lines starting with # are ignored
            * To specify multiple values for an option, do not use = after the option name:
                * right: --configfile foo bar
                * wrong: --configfile=foo bar
            """),
            formatter_class=argparse.RawDescriptionHelpFormatter,
            **kwargs)
        self.add_argument(metavar='input', dest="rawInput",
                          help="path to input data repository, relative to $%s" % (DEFAULT_INPUT_NAME,))
        self.add_argument("--calib", dest="rawCalib",
                          help="path to input calibration repository, relative to $%s" %
                          (DEFAULT_CALIB_NAME,))
        self.add_argument("--output", dest="rawOutput",
                          help="path to output data repository (need not exist), relative to $%s" %
                          (DEFAULT_OUTPUT_NAME,))
        self.add_argument("--rerun", dest="rawRerun", metavar="[INPUT:]OUTPUT",
                          help="rerun name: sets OUTPUT to ROOT/rerun/OUTPUT; "
                               "optionally sets ROOT to ROOT/rerun/INPUT")
        self.add_argument("-c", "--config", nargs="*", action=ConfigValueAction,
                          help="config override(s), e.g. -c foo=newfoo bar.baz=3", metavar="NAME=VALUE")
        self.add_argument("-C", "--configfile", dest="configfile", nargs="*", action=ConfigFileAction,
                          help="config override file(s)")
        self.add_argument("-L", "--loglevel", nargs="*", action=LogLevelAction,
                          help="logging level; supported levels are [trace|debug|info|warn|error|fatal]",
                          metavar="LEVEL|COMPONENT=LEVEL")
        self.add_argument("--longlog", action="store_true", help="use a more verbose format for the logging")
        self.add_argument("--debug", action="store_true", help="enable debugging output?")
        self.add_argument("--doraise", action="store_true",
                          help="raise an exception on error (else log a message and continue)?")
        self.add_argument("--noExit", action="store_true",
                          help="Do not exit even upon failure (i.e. return a struct to the calling script)")
        self.add_argument("--profile", help="Dump cProfile statistics to filename")
        self.add_argument("--show", nargs="+", default=(),
                          help="display the specified information to stdout and quit "
                               "(unless run is specified).")
        self.add_argument("-j", "--processes", type=int, default=1, help="Number of processes to use")
        self.add_argument("-t", "--timeout", type=float,
                          help="Timeout for multiprocessing; maximum wall time (sec)")
        self.add_argument("--clobber-output", action="store_true", dest="clobberOutput", default=False,
                          help=("remove and re-create the output directory if it already exists "
                                "(safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--clobber-config", action="store_true", dest="clobberConfig", default=False,
                          help=("backup and then overwrite existing config files instead of checking them "
                                "(safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--no-backup-config", action="store_true", dest="noBackupConfig", default=False,
                          help="Don't copy config to file~N backup.")
        self.add_argument("--clobber-versions", action="store_true", dest="clobberVersions", default=False,
                          help=("backup and then overwrite existing package versions instead of checking"
                                "them (safe with -j, but not all other forms of parallel execution)"))
        self.add_argument("--no-versions", action="store_true", dest="noVersions", default=False,
                          help="don't check package versions; useful for development")
        # set a default log format; parse_args switches to the verbose format if --longlog is given
        lsstLog.configure_prop("""
log4j.rootLogger=INFO, A1
log4j.appender.A1=ConsoleAppender
log4j.appender.A1.Target=System.err
log4j.appender.A1.layout=PatternLayout
log4j.appender.A1.layout.ConversionPattern=%c %p: %m%n
""")

    def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True,
                        ContainerClass=DataIdContainer):
        """!Add a data ID argument

        Add an argument to specify data IDs. If datasetType is an instance of DatasetArgument,
        then add a second argument to specify the dataset type.

        @param[in] name  data ID argument (including leading dashes, if wanted)
        @param[in] datasetType  type of dataset; supply a string for a fixed dataset type,
            or a DynamicDatasetType, such as DatasetArgument, for a dynamically determined
            dataset type
        @param[in] help  help string for the argument
        @param[in] level  level of dataset, for butler
        @param[in] doMakeDataRefList  construct data references?
        @param[in] ContainerClass  data ID container class to use to contain results;
            override the default if you need a special means of computing data references
            from data IDs

        The associated data is put into namespace.<dataIdArgument.name> as an instance of
        ContainerClass; the container includes fields:
        - idList: a list of data ID dicts
        - refList: a list of butler data references (empty if doMakeDataRefList false)

        @throw RuntimeError if the argument name is already in use or is a reserved name
        """
        argName = name.lstrip("-")
        if argName in self._dataIdArgDict:
            raise RuntimeError("Data ID argument %s already exists" % (name,))
        if argName in set(("camera", "config", "butler", "log", "obsPkg")):
            raise RuntimeError("Data ID argument %s is a reserved name" % (name,))

        self.add_argument(name, nargs="*", action=IdValueAction, help=help,
                          metavar="KEY=VALUE1[^VALUE2[^VALUE3...]")

        dataIdArgument = DataIdArgument(
            name=argName,
            datasetType=datasetType,
            level=level,
            doMakeDataRefList=doMakeDataRefList,
            ContainerClass=ContainerClass,
        )

        if dataIdArgument.isDynamicDatasetType:
            datasetType.addArgument(parser=self, idName=argName)

        self._dataIdArgDict[argName] = dataIdArgument
424 def parse_args(self, config, args=None, log=None, override=None):
425 """!Parse arguments for a pipeline task 427 @param[in,out] config config for the task being run 428 @param[in] args argument list; if None use sys.argv[1:] 429 @param[in] log log (instance lsst.log Log); if None use the default log 430 @param[in] override a config override function; it must take the root config object 431 as its only argument and must modify the config in place. 432 This function is called after camera-specific overrides files are applied, and before 433 command-line config overrides are applied (thus allowing the user the final word). 435 @return namespace: an argparse.Namespace containing many useful fields including: 436 - camera: camera name 437 - config: the supplied config with all overrides applied, validated and frozen 438 - butler: a butler for the data 439 - an entry for each of the data ID arguments registered by add_id_argument(), 440 the value of which is a DataIdArgument that includes public elements 'idList' and 'refList' 441 - log: a lsst.log Log 442 - an entry for each command-line argument, with the following exceptions: 443 - config is the supplied config, suitably updated 444 - configfile, id and loglevel are all missing 445 - obsPkg: name of obs_ package for this camera 450 if len(args) < 1
or args[0].startswith(
"-")
or args[0].startswith(
"@"):
452 if len(args) == 1
and args[0]
in (
"-h",
"--help"):
455 self.exit(
"%s: error: Must specify input as first argument" % self.prog)
459 namespace = argparse.Namespace()
460 namespace.input = _fixPath(DEFAULT_INPUT_NAME, args[0])
461 if not os.path.isdir(namespace.input):
462 self.error(
"Error: input=%r not found" % (namespace.input,))
464 namespace.config = config
465 namespace.log = log
if log
is not None else lsstLog.Log.getDefaultLogger()
466 mapperClass = dafPersist.Butler.getMapperClass(namespace.input)
467 namespace.camera = mapperClass.getCameraName()
468 namespace.obsPkg = mapperClass.getPackageName()
473 if override
is not None:
474 override(namespace.config)
478 setattr(namespace, dataIdArgument.name, dataIdArgument.ContainerClass(level=dataIdArgument.level))
480 namespace = argparse.ArgumentParser.parse_args(self, args=args, namespace=namespace)
481 del namespace.configfile
485 if namespace.clobberOutput:
486 if namespace.output
is None:
487 self.error(
"--clobber-output is only valid with --output or --rerun")
488 elif namespace.output == namespace.input:
489 self.error(
"--clobber-output is not valid when the output and input repos are the same")
490 if os.path.exists(namespace.output):
491 namespace.log.info(
"Removing output repo %s for --clobber-output", namespace.output)
492 shutil.rmtree(namespace.output)
494 namespace.log.debug(
"input=%s", namespace.input)
495 namespace.log.debug(
"calib=%s", namespace.calib)
496 namespace.log.debug(
"output=%s", namespace.output)
501 if self.
requireOutput and namespace.output
is None and namespace.rerun
is None:
502 self.error(
"no output directory specified.\n" 503 "An output directory must be specified with the --output or --rerun\n" 504 "command-line arguments.\n")
508 butlerArgs = {
'mapperArgs': {
'calibRoot': namespace.calib}}
510 outputs = {
'root': namespace.output,
'mode':
'rw'}
511 inputs = {
'root': namespace.input}
512 inputs.update(butlerArgs)
513 outputs.update(butlerArgs)
514 namespace.butler = dafPersist.Butler(inputs=inputs, outputs=outputs)
516 outputs = {
'root': namespace.input,
'mode':
'rw'}
517 outputs.update(butlerArgs)
518 namespace.butler = dafPersist.Butler(outputs=outputs)
524 if "data" in namespace.show:
526 for dataRef
in getattr(namespace, dataIdName).refList:
527 print(
"%s dataRef.dataId = %s" % (dataIdName, dataRef.dataId))
529 if namespace.show
and "run" not in namespace.show:
537 sys.stderr.write(
"Warning: no 'debug' module found\n")
538 namespace.debug =
False 540 del namespace.loglevel
542 if namespace.longlog:
543 lsstLog.configure_prop(
""" 544 log4j.rootLogger=INFO, A1 545 log4j.appender.A1=ConsoleAppender 546 log4j.appender.A1.Target=System.err 547 log4j.appender.A1.layout=PatternLayout 548 log4j.appender.A1.layout.ConversionPattern=%-5p %d{yyyy-MM-ddThh:mm:ss.sss} %c (%X{LABEL})(%F:%L)- %m%n 550 del namespace.longlog
552 namespace.config.validate()
553 namespace.config.freeze()
557 def _parseDirectories(self, namespace):
558 """Parse input, output and calib directories 560 This allows for hacking the directories, e.g., to include a "rerun". 561 Modifications are made to the 'namespace' object in-place. 563 mapperClass = dafPersist.Butler.getMapperClass(_fixPath(DEFAULT_INPUT_NAME, namespace.rawInput))
564 namespace.calib = _fixPath(DEFAULT_CALIB_NAME, namespace.rawCalib)
567 if namespace.rawOutput:
568 namespace.output = _fixPath(DEFAULT_OUTPUT_NAME, namespace.rawOutput)
570 namespace.output =
None 575 if namespace.rawRerun:
577 self.error(
"Error: cannot specify both --output and --rerun")
578 namespace.rerun = namespace.rawRerun.split(
":")
579 rerunDir = [os.path.join(namespace.input,
"rerun", dd)
for dd
in namespace.rerun]
580 modifiedInput =
False 581 if len(rerunDir) == 2:
582 namespace.input, namespace.output = rerunDir
584 elif len(rerunDir) == 1:
585 namespace.output = rerunDir[0]
586 if os.path.exists(os.path.join(namespace.output,
"_parent")):
587 namespace.input = os.path.realpath(os.path.join(namespace.output,
"_parent"))
590 self.error(
"Error: invalid argument for --rerun: %s" % namespace.rerun)
591 if modifiedInput
and dafPersist.Butler.getMapperClass(namespace.input) != mapperClass:
592 self.error(
"Error: input directory specified by --rerun must have the same mapper as INPUT")
594 namespace.rerun =
None 595 del namespace.rawInput
596 del namespace.rawCalib
597 del namespace.rawOutput
598 del namespace.rawRerun
600 def _processDataIds(self, namespace):
601 """!Process the parsed data for each data ID argument 604 - Validate data ID keys 605 - Cast the data ID values to the correct type 606 - Compute data references from data IDs 608 @param[in,out] namespace parsed namespace (an argparse.Namespace); 609 reads these attributes: 612 - config, if any dynamic dataset types are set by a config parameter 613 - dataset type arguments (e.g. id_dstype), if any dynamic dataset types are specified by such 614 and modifies these attributes: 615 - <name> for each data ID argument registered using add_id_argument 618 dataIdContainer = getattr(namespace, dataIdArgument.name)
619 dataIdContainer.setDatasetType(dataIdArgument.getDatasetType(namespace))
620 if dataIdArgument.doMakeDataRefList:
622 dataIdContainer.castDataIds(butler=namespace.butler)
623 except (KeyError, TypeError)
as e:
628 dataIdContainer.makeDataRefList(namespace)
630 def _applyInitialOverrides(self, namespace):
631 """!Apply obs-package-specific and camera-specific config override files, if found 633 @param[in] namespace parsed namespace (an argparse.Namespace); 634 reads these attributes: 637 Look in the package namespace.obsPkg for files: 638 - config/<task_name>.py 639 - config/<camera_name>/<task_name>.py 642 obsPkgDir = lsst.utils.getPackageDir(namespace.obsPkg)
643 fileName = self.
_name +
".py" 645 os.path.join(obsPkgDir,
"config", fileName),
646 os.path.join(obsPkgDir,
"config", namespace.camera, fileName),
648 if os.path.exists(filePath):
649 namespace.log.info(
"Loading config overrride file %r", filePath)
650 namespace.config.load(filePath)
652 namespace.log.debug(
"Config override file does not exist: %r", filePath)
655 """!Perform camera-specific operations before parsing the command line. 657 The default implementation does nothing. 659 @param[in,out] namespace namespace (an argparse.Namespace) with the following fields: 660 - camera: the camera name 661 - config: the config passed to parse_args, with no overrides applied 662 - obsPkg: the obs_ package for this camera 663 - log: a lsst.log Log 668 """!Allow files of arguments referenced by `@<path>` to contain multiple values on each line 670 @param[in] arg_line line of text read from an argument file 672 arg_line = arg_line.strip()
673 if not arg_line
or arg_line.startswith(
"#"):
675 for arg
in shlex.split(arg_line, comments=
True, posix=
True):
682 """An ArgumentParser for pipeline tasks that don't write any output""" 683 requireOutput =
def getTaskDict(config, taskDict=None, baseName=""):
    """!Get a dictionary of task info for all subtasks in a config

    Designed to be called recursively; the user should call with only a config
    (leaving taskDict and baseName at their default values).

    @param[in] config  configuration to process, an instance of lsst.pex.config.Config
    @param[in,out] taskDict  users should not specify this argument;
        (supports recursion; if provided, taskDict is updated in place, else a new dict is started)
    @param[in] baseName  users should not specify this argument.
        (supports recursion: if a non-empty string then a period is appended and the result is used
        as a prefix for additional entries in taskDict; otherwise no prefix is used)
    @return taskDict: a dict of config field name: task name
    """
    # default built inside the function to avoid a shared mutable default argument
    if taskDict is None:
        taskDict = dict()
    for fieldName, field in config.items():
        # a ConfigurableField/RegistryField exposes both "value" and "target"
        if hasattr(field, "value") and hasattr(field, "target"):
            subConfig = field.value
            if isinstance(subConfig, pexConfig.Config):
                subBaseName = "%s.%s" % (baseName, fieldName) if baseName else fieldName
                try:
                    taskName = "%s.%s" % (field.target.__module__, field.target.__name__)
                except Exception:
                    taskName = repr(field.target)
                taskDict[subBaseName] = taskName
                getTaskDict(config=subConfig, taskDict=taskDict, baseName=subBaseName)
    return taskDict
def obeyShowArgument(showOpts, config=None, exit=False):
    """!Process arguments specified with --show (but ignores "data")

    @param showOpts  List of options passed to --show
    @param config  The provided config
    @param exit  Exit if "run" isn't included in showOpts

    Supports the following options in showOpts:
    - config[=PAT]  Dump all the config entries, or just the ones that match the glob pattern
    - history=PAT  Show where the config entries that match the glob pattern were set
    - tasks  Show task hierarchy
    - data  Ignored; to be processed by caller
    - run  Keep going (the default behaviour is to exit if --show is specified)

    Calls sys.exit(1) if any other option found.
    """
    if not showOpts:
        return

    for what in showOpts:
        showCommand, showArgs = what.split("=", 1) if "=" in what else (what, "")

        if showCommand == "config":
            matConfig = re.search(r"^(?:config.)?(.+)?", showArgs)
            pattern = matConfig.group(1)
            if pattern:
                class FilteredStream(object):
                    """A file object that only prints lines that match the glob "pattern"

                    N.b. Newlines are silently discarded and reinserted; crude but effective.
                    """

                    def __init__(self, pattern):
                        # obey case if pattern isn't lowercase or requests NOIGNORECASE
                        mat = re.search(r"(.*):NOIGNORECASE$", pattern)

                        if mat:
                            pattern = mat.group(1)
                            self._pattern = re.compile(fnmatch.translate(pattern))
                        else:
                            if pattern != pattern.lower():
                                print(u"Matching \"%s\" without regard to case "
                                      "(append :NOIGNORECASE to prevent this)" % (pattern,),
                                      file=sys.stdout)
                            self._pattern = re.compile(fnmatch.translate(pattern), re.IGNORECASE)

                    def write(self, showStr):
                        showStr = showStr.rstrip()
                        # strip off doc string line(s) and cut off at "=" for string matching
                        matchStr = showStr.split("\n")[-1].split("=")[0]
                        if self._pattern.search(matchStr):
                            print(u"\n" + showStr)

                fd = FilteredStream(pattern)
            else:
                fd = sys.stdout

            config.saveToStream(fd, "config")
        elif showCommand == "history":
            matHistory = re.search(r"^(?:config.)?(.+)?", showArgs)
            pattern = matHistory.group(1)
            if not pattern:
                print("Please provide a value with --show history (e.g. history=XXX)", file=sys.stderr)
                sys.exit(1)

            pattern = pattern.split(".")
            cpath, cname = pattern[:-1], pattern[-1]
            hconfig = config  # the config that we're interested in
            for i, cpt in enumerate(cpath):
                try:
                    hconfig = getattr(hconfig, cpt)
                except AttributeError:
                    print("Error: configuration %s has no subconfig %s" %
                          (".".join(["config"] + cpath[:i]), cpt), file=sys.stderr)
                    sys.exit(1)

            try:
                print(pexConfig.history.format(hconfig, cname))
            except KeyError:
                print("Error: %s has no field %s" % (".".join(["config"] + cpath), cname),
                      file=sys.stderr)
                sys.exit(1)

        elif showCommand == "data":
            pass  # to be processed by the caller
        elif showCommand == "run":
            pass  # keep going after show processing
        elif showCommand == "tasks":
            showTaskHierarchy(config)
        else:
            print(u"Unknown value for show: %s (choose from '%s')" %
                  (what, "', '".join("config[=XXX] data history=XXX tasks run".split())),
                  file=sys.stderr)
            sys.exit(1)

    if exit and "run" not in showOpts:
        sys.exit(0)
def showTaskHierarchy(config):
    """!Print task hierarchy to stdout

    @param[in] config: configuration to process (an lsst.pex.config.Config)
    """
    # NOTE(review): header line reconstructed from elided source lines — confirm
    print(u"Subtasks:")
    taskDict = getTaskDict(config=config)

    fieldNameList = sorted(taskDict.keys())
    for fieldName in fieldNameList:
        taskName = taskDict[fieldName]
        print(u"%s: %s" % (fieldName, taskName))
class ConfigValueAction(argparse.Action):
    """!argparse action callback to override config parameters using
    name=value pairs from the command line
    """

    def __call__(self, parser, namespace, values, option_string):
        """!Override one or more config name value pairs

        @param[in] parser  argument parser (instance of ArgumentParser)
        @param[in,out] namespace  parsed command (an instance of argparse.Namespace);
            updated values:
            - namespace.config
        @param[in] values  a list of configItemName=value pairs
        @param[in] option_string  option value specified by the user (a str)
        """
        if namespace.config is None:
            return
        for nameValue in values:
            name, sep, valueStr = nameValue.partition("=")
            if not valueStr:
                parser.error("%s value %s must be in form name=value" % (option_string, nameValue))

            # see if setting the string value works; if not, try eval
            try:
                setDottedAttr(namespace.config, name, valueStr)
            except AttributeError:
                parser.error("no config field: %s" % (name,))
            except Exception:
                try:
                    # NOTE: eval of a command-line token; empty globals limit what it can see,
                    # but values still come from the (trusted) operator
                    value = eval(valueStr, {})
                except Exception:
                    parser.error("cannot parse %r as a value for %s" % (valueStr, name))
                try:
                    setDottedAttr(namespace.config, name, value)
                except Exception as e:
                    parser.error("cannot set config.%s=%r: %s" % (name, value, e))
class ConfigFileAction(argparse.Action):
    """!argparse action to load config overrides from one or more files"""

    def __call__(self, parser, namespace, values, option_string=None):
        """!Load one or more files of config overrides

        @param[in] parser  argument parser (instance of ArgumentParser)
        @param[in,out] namespace  parsed command (an instance of argparse.Namespace);
            updated values:
            - namespace.config
        @param[in] values  a list of data config file paths
        @param[in] option_string  option value specified by the user (a str)
        """
        if namespace.config is None:
            return
        for configfile in values:
            try:
                namespace.config.load(configfile)
            except Exception as e:
                parser.error("cannot load config file %r: %s" % (configfile, e))
class IdValueAction(argparse.Action):
    """!argparse action callback to process a data ID into a dict"""

    def __call__(self, parser, namespace, values, option_string):
        """!Parse --id data and append results to namespace.<argument>.idList

        @param[in] parser  argument parser (instance of ArgumentParser)
        @param[in,out] namespace  parsed command (an instance of argparse.Namespace);
            updated values:
            - <idName>.idList, where <idName> is the name of the ID argument,
              for instance "id" for ID argument --id
        @param[in] values  a list of data IDs; see data format below
        @param[in] option_string  option value specified by the user (a str)

        The data format is:

            key1=value1_1[^value1_2[^value1_3...] key2=value2_1[^value2_2[^value2_3...]...

        The values (e.g. value1_1) may either be a string, or of the form "int..int" (e.g. "1..3")
        which is interpreted as "1^2^3" (inclusive, unlike a python range). So "0^2..4^7..9" is
        equivalent to "0^2^3^4^7^8^9". You may also specify a stride: "1..5:2" is "1^3^5"

        The cross product is computed for keys with multiple values. For example:

            --id visit 1^2 ccd 1,1^2,2

        results in the following data ID dicts being appended to namespace.<argument>.idList:

            {"visit":1, "ccd":"1,1"}
            {"visit":2, "ccd":"1,1"}
            {"visit":1, "ccd":"2,2"}
            {"visit":2, "ccd":"2,2"}
        """
        if namespace.config is None:
            return
        idDict = collections.OrderedDict()
        for nameValue in values:
            name, sep, valueStr = nameValue.partition("=")
            if name in idDict:
                parser.error("%s appears multiple times in one ID argument: %s" % (name, option_string))
            idDict[name] = []
            for v in valueStr.split("^"):
                # expand "v1..v2[:v3]" into an inclusive range with optional stride
                mat = re.search(r"^(\d+)\.\.(\d+)(?::(\d+))?$", v)
                if mat:
                    v1 = int(mat.group(1))
                    v2 = int(mat.group(2))
                    v3 = mat.group(3)
                    v3 = int(v3) if v3 else 1
                    for v in range(v1, v2 + 1, v3):
                        idDict[name].append(str(v))
                else:
                    idDict[name].append(v)

        # cross product of all key value lists, preserving key order
        iterList = [idDict[key] for key in idDict.keys()]
        idDictList = [collections.OrderedDict(zip(idDict.keys(), valList))
                      for valList in itertools.product(*iterList)]

        argName = option_string.lstrip("-")
        ident = getattr(namespace, argName)
        ident.idList += idDictList
class LogLevelAction(argparse.Action):
    """!argparse action to set log level"""

    def __call__(self, parser, namespace, values, option_string):
        """!Set trace level

        @param[in] parser  argument parser (instance of ArgumentParser)
        @param[in] namespace  parsed command (an instance of argparse.Namespace); ignored
        @param[in] values  a list of trace levels;
            each item must be of the form 'component_name=level' or 'level',
            where level is a keyword (not case sensitive) or an integer
        @param[in] option_string  option value specified by the user (a str)
        """
        permittedLevelList = ('TRACE', 'DEBUG', 'INFO', 'WARN', 'ERROR', 'FATAL')
        permittedLevelSet = set(permittedLevelList)
        for componentLevel in values:
            component, sep, levelStr = componentLevel.partition("=")
            if not levelStr:
                # bare "LEVEL" applies to the root logger
                levelStr, component = component, None
            logLevelUpr = levelStr.upper()
            if logLevelUpr in permittedLevelSet:
                logLevel = getattr(lsstLog.Log, logLevelUpr)
            else:
                parser.error("loglevel=%r not one of %s" % (levelStr, permittedLevelList))
            if component is None:
                namespace.log.setLevel(logLevel)
            else:
                lsstLog.Log.getLogger(component).setLevel(logLevel)
def setDottedAttr(item, name, value):
    """!Like setattr, but accepts hierarchical names, e.g. foo.bar.baz

    @param[in,out] item  object whose attribute is to be set
    @param[in] name  name of item to set
    @param[in] value  new value for the item

    For example if name is foo.bar.baz then item.foo.bar.baz is set to the specified value.
    """
    subitem = item
    subnameList = name.split(".")
    # walk down to the parent of the final attribute, then set it
    for subname in subnameList[:-1]:
        subitem = getattr(subitem, subname)
    setattr(subitem, subnameList[-1], value)
def getDottedAttr(item, name):
    """!Like getattr, but accepts hierarchical names, e.g. foo.bar.baz

    @param[in] item  object whose attribute is to be returned
    @param[in] name  name of item to get

    For example if name is foo.bar.baz then returns item.foo.bar.baz
    """
    subitem = item
    for subname in name.split("."):
        subitem = getattr(subitem, subname)
    return subitem
def dataExists(butler, datasetType, dataRef):
    """!Return True if data exists at the current level or any data exists at a deeper level,
    False otherwise

    @param[in] butler  data butler (a \\ref lsst.daf.persistence.butler.Butler
        "lsst.daf.persistence.Butler")
    @param[in] datasetType  dataset type (a str)
    @param[in] dataRef  butler data reference (a
        \\ref lsst.daf.persistence.butlerSubset.ButlerDataRef
        "lsst.daf.persistence.ButlerDataRef")
    """
    subDRList = dataRef.subItems()
    if subDRList:
        # not a leaf: recurse; any existing descendant counts
        for subDR in subDRList:
            if dataExists(butler, datasetType, subDR):
                return True
        return False
    # leaf reference: ask the butler directly
    return butler.datasetExists(datasetType=datasetType, dataId=dataRef.dataId)
def addArgument(self, parser, idName)
Add a command-line argument to specify dataset type name, if wanted.
def getTaskDict(config, taskDict=None, baseName="")
Get a dictionary of task info for all subtasks in a config.
def __init__(self, level=None)
Construct a DataIdContainer.
def getDottedAttr(item, name)
Like getattr, but accepts hierarchical names, e.g.
argparse action to set log level
An argument parser for pipeline tasks that is based on argparse.ArgumentParser.
def __init__(self, name=None, help="dataset type to process from input data repository", default=None)
Construct a DatasetArgument.
def getDatasetType(self, namespace)
A dataset type specified by a command-line argument.
def addArgument(self, parser, idName)
Add a command-line argument to specify dataset type name.
Glorified struct for data about id arguments, used by ArgumentParser.add_id_argument.
def makeDataRefList(self, namespace)
Compute refList based on idList.
def setDottedAttr(item, name, value)
Like setattr, but accepts hierarchical names, e.g.
def __call__(self, parser, namespace, values, option_string=None)
Load one or more files of config overrides.
argparse action callback to override config parameters using name=value pairs from the command line ...
def getDatasetType(self, namespace)
def obeyShowArgument(showOpts, config=None, exit=False)
Process arguments specified with –show (but ignores "data")
def getDatasetType(self, namespace)
Return the dataset type as a string.
def _processDataIds(self, namespace)
Process the parsed data for each data ID argument.
def getDatasetType(self, namespace)
def convert_arg_line_to_args(self, arg_line)
Allow files of arguments referenced by @<path> to contain multiple values on each line...
def isDynamicDatasetType(self)
Is the dataset type dynamic (specified on the command line)?
def handleCamera(self, namespace)
Perform camera-specific operations before parsing the command line.
argparse action callback to process a data ID into a dict
def __init__(self, name, datasetType, level, doMakeDataRefList=True, ContainerClass=DataIdContainer)
Constructor.
def __init__(self, name)
Construct a ConfigDatasetType.
def _applyInitialOverrides(self, namespace)
Apply obs-package-specific and camera-specific config override files, if found.
A dataset type specified by a config parameter.
def castDataIds(self, butler)
Validate data IDs and cast them to the correct type (modify idList in place).
def __init__(self, name, usage="%(prog)s input [options]", kwargs)
Construct an ArgumentParser.
Abstract base class for a dataset type determined from parsed command-line arguments.
def dataExists(butler, datasetType, dataRef)
Return True if data exists at the current level or any data exists at a deeper level, False otherwise.
def __call__(self, parser, namespace, values, option_string)
Set trace level.
def setDatasetType(self, datasetType)
Set actual dataset type, once it is known.
argparse action to load config overrides from one or more files
def showTaskHierarchy(config)
Print task hierarchy to stdout.
def __call__(self, parser, namespace, values, option_string)
Parse –id data and append results to namespace.
def __call__(self, parser, namespace, values, option_string)
Override one or more config name value pairs.
def parse_args(self, config, args=None, log=None, override=None)
Parse arguments for a pipeline task.
A container for data IDs and associated data references.
def _parseDirectories(self, namespace)
def add_id_argument(self, name, datasetType, help, level=None, doMakeDataRefList=True, ContainerClass=DataIdContainer)
Add a data ID argument.