Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 83%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import click
import yaml

from ..opt import (collection_type_option, dataset_type_option, directory_argument, options_file_option,
                   glob_argument, repo_argument, run_option, transfer_option, verbose_option)
from ..utils import cli_handle_exception, split_commas, typeStrAcceptsMultiple, unwrap
from ...script import (butlerImport, createRepo, configDump, configValidate, pruneCollection,
                       queryCollections, queryDatasetTypes)

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


# The conversion from the "import" command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern, a
# better mechanism should be implemented.
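# For example, the intended round trip (an assumed illustration of those
# helpers, not taken from cli/butler.py itself) is:
#   funcNameToCmdName("butler_import") -> "import"
#   cmdNameToFuncName("import") -> "butler_import"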
@click.command("import")
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@run_option(required=True)
@click.option("--export-file",
              help="Name for the file that contains database information associated with the exported "
                   "datasets. If this is not an absolute path, does not exist in the current working "
                   "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
                   "to \"export.yaml\".",
              type=click.File("r"))
@click.option("--skip-dimensions", "-s", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Dimensions that should be skipped during import")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    cli_handle_exception(butlerImport, *args, **kwargs)
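
# Example invocation (illustrative sketch only; assumes this command is wired
# into the `butler` CLI as `butler import`, and that "symlink" is one of the
# transfer modes accepted by transfer_option; paths and run name are
# placeholders):
#   butler import /path/to/repo /path/to/exported/data \
#       --run my/ingest/run --transfer symlink --export-file export.yaml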


@click.command()
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--standalone", is_flag=True, help="Include all defaults in the config file in the repo, "
                                                 "insulating the repo from changes in package defaults.")
@click.option("--override", is_flag=True, help="Allow values in the supplied config to override all "
                                               "repo settings.")
@click.option("--outfile", "-f", default=None, type=str, help="Name of output file to receive repository "
              "configuration. Default is to write butler.yaml into the specified repo.")
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    cli_handle_exception(createRepo, *args, **kwargs)
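
# Example invocation (illustrative sketch only; assumes this command is
# exposed as `butler create`; the seed config path is a placeholder):
#   butler create /path/to/new/repo --seed-config seed-config.yaml --standalone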


@click.command(short_help="Dump butler config to stdout.")
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--subset", "-s", type=str,
              help="Subset of a configuration to report. This can be any key in the hierarchy such as "
                   "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.")
@click.option("--searchpath", "-p", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Additional search paths to use for configuration overrides")
@click.option("--file", "outfile", type=click.File("w"), default="-",
              help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
                   "by default.")
@options_file_option()
def config_dump(*args, **kwargs):
    """Dump either a subset or full Butler configuration to standard output."""
    cli_handle_exception(configDump, *args, **kwargs)
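
# Example invocation (illustrative sketch only; assumes this command is
# exposed as `butler config-dump`; the output file name is a placeholder):
#   butler config-dump /path/to/repo --subset .datastore.root --file dumped-config.yaml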


@click.command(short_help="Validate the configuration files.")
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option("--ignore", "-i", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="DatasetType(s) to ignore for validation.")
@options_file_option()
def config_validate(*args, **kwargs):
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = cli_handle_exception(configValidate, *args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)
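
# Example invocation (illustrative sketch only; assumes this command is
# exposed as `butler config-validate`; the dataset type names are
# placeholders):
#   butler config-validate /path/to/repo --ignore someDatasetType,otherDatasetType --quiet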


@click.command()
@repo_argument(required=True)
@click.option("--collection",
              help=unwrap("""Name of the collection to remove. If this is a TAGGED or CHAINED collection,
                          datasets within the collection are not modified unless --unstore is passed. If this
                          is a RUN collection, --purge and --unstore must be passed, and all datasets in it
                          are fully removed from the data repository."""))
@click.option("--purge",
              help=unwrap("""Permit RUN collections to be removed, fully removing datasets within them.
                          Requires --unstore as an added precaution against accidental deletion. Must not be
                          passed if the collection is not a RUN."""),
              is_flag=True)
@click.option("--unstore",
              help="Remove all datasets in the collection from all datastores in which they appear.",
              is_flag=True)
@options_file_option()
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    cli_handle_exception(pruneCollection, **kwargs)
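
# Example invocation (illustrative sketch only; assumes this command is
# exposed as `butler prune-collection`; the collection name is a placeholder):
#   butler prune-collection /path/to/repo --collection my/run --purge --unstore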


@click.command(short_help="Search for collections.")
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "collections to return.")
@collection_type_option()
@click.option("--flatten-chains/--no-flatten-chains",
              help="Recursively get the child collections of matching CHAINED collections. Default is "
                   "--no-flatten-chains.")
@click.option("--include-chains/--no-include-chains",
              default=None,
              help="For --include-chains, return records for matching CHAINED collections. For "
                   "--no-include-chains do not return records for CHAINED collections. Default is the "
                   "opposite of --flatten-chains: include either CHAINED collections or their children, but "
                   "not both.")
@options_file_option()
def query_collections(*args, **kwargs):
    """Get the collections whose names match an expression."""
    print(yaml.dump(cli_handle_exception(queryCollections, *args, **kwargs)))
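
# Example invocation (illustrative sketch only; assumes this command is
# exposed as `butler query-collections`; the glob expression is a placeholder):
#   butler query-collections /path/to/repo "imported/*" --flatten-chains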


@click.command()
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "dataset types to return.")
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@click.option("--components/--no-components",
              default=None,
              help="For --components, apply all expression patterns to component dataset type names as well. "
                   "For --no-components, never apply patterns to components. Default (where neither is "
                   "specified) is to apply patterns to components only if their parent datasets were not "
                   "matched by the expression. Fully-specified component datasets (`str` or `DatasetType` "
                   "instances) are always included.")
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    print(yaml.dump(cli_handle_exception(queryDatasetTypes, *args, **kwargs), sort_keys=False))
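
# Example invocation (illustrative sketch only; assumes this command is
# exposed as `butler query-dataset-types` and that verbose_option provides a
# --verbose flag; the glob expression is a placeholder):
#   butler query-dataset-types /path/to/repo "calexp*" --verbose --components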