# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import click
import yaml

from ..opt import (
    collection_type_option,
    collections_option,
    dataset_type_option,
    datasets_option,
    dimensions_argument,
    directory_argument,
    glob_argument,
    options_file_option,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)

from ..utils import cli_handle_exception, split_commas, typeStrAcceptsMultiple, unwrap
from ... import script


willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."
whereHelp = unwrap("""A string expression similar to a SQL WHERE clause. May involve any column of a dimension
                   table or a dimension name as a shortcut for the primary key column of a dimension
                   table.""")


# The conversion from the "import" command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern, a
# better mechanism should be implemented.
@click.command("import")
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option("--export-file",
              help="Name for the file that contains database information associated with the exported "
                   "datasets. If this is not an absolute path and does not exist in the current working "
                   "directory, it is assumed to be in DIRECTORY. Defaults to \"export.yaml\".",
              type=click.File("r"))
@click.option("--skip-dimensions", "-s", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Dimensions that should be skipped during import.")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    cli_handle_exception(script.butlerImport, *args, **kwargs)
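
# Example invocation of the "import" command defined above. This is an
# illustrative sketch only: the "butler" executable name, the repository and
# export-directory paths, and the "symlink" transfer mode (whose flag spelling
# comes from transfer_option in ..opt) are assumptions, not values taken from
# this module.
#
#     butler import /path/to/repo /path/to/exported/data \
#         --export-file export.yaml --transfer symlink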


@click.command()
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option("--standalone", is_flag=True, help="Include all defaults in the config file in the repo, "
              "insulating the repo from changes in package defaults.")
@click.option("--override", is_flag=True, help="Allow values in the supplied config to override all "
              "repo settings.")
@click.option("--outfile", "-f", default=None, type=str, help="Name of output file to receive repository "
              "configuration. Default is to write butler.yaml into the specified repo.")
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    cli_handle_exception(script.createRepo, *args, **kwargs)
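
# Example invocation of the "create" command defined above. Illustrative only:
# the "butler" executable name and both paths are assumptions.
#
#     butler create /path/to/new/repo --seed-config extra_config.yaml --standalone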


@click.command(short_help="Dump butler config to stdout.")
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--subset", "-s", type=str,
              help="Subset of a configuration to report. This can be any key in the hierarchy such as "
                   "'.datastore.root', where the leading '.' specifies the delimiter for the hierarchy.")
@click.option("--searchpath", "-p", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Additional search paths to use for configuration overrides.")
@click.option("--file", "outfile", type=click.File("w"), default="-",
              help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
                   "by default.")
@options_file_option()
def config_dump(*args, **kwargs):
    """Dump either a subset or the full Butler configuration to standard output."""
    cli_handle_exception(script.configDump, *args, **kwargs)
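
# Example invocation of the "config_dump" command defined above. Illustrative
# only: the "butler" executable name, the "config-dump" spelling produced by
# the command-name conversion in cli/butler.py, and the repo path are
# assumptions.
#
#     butler config-dump /path/to/repo --subset .datastore.root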


@click.command(short_help="Validate the configuration files.")
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option("--ignore", "-i", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="DatasetType(s) to ignore for validation.")
@options_file_option()
def config_validate(*args, **kwargs):
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = cli_handle_exception(script.configValidate, *args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)
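
# Example invocation of the "config_validate" command defined above.
# Illustrative only: the "butler" executable name, the "config-validate"
# command spelling, the repo path, and the ignored dataset type name are
# assumptions.
#
#     butler config-validate /path/to/repo --ignore some_dataset_type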


@click.command()
@repo_argument(required=True)
@click.option("--collection",
              help=unwrap("""Name of the collection to remove. If this is a TAGGED or CHAINED collection,
                          datasets within the collection are not modified unless --unstore is passed. If this
                          is a RUN collection, --purge and --unstore must be passed, and all datasets in it
                          are fully removed from the data repository."""))
@click.option("--purge",
              help=unwrap("""Permit RUN collections to be removed, fully removing datasets within them.
                          Requires --unstore as an added precaution against accidental deletion. Must not be
                          passed if the collection is not a RUN."""),
              is_flag=True)
@click.option("--unstore",
              help="Remove all datasets in the collection from all datastores in which they appear.",
              is_flag=True)
@options_file_option()
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    cli_handle_exception(script.pruneCollection, **kwargs)
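
# Example invocation of the "prune_collection" command defined above.
# Illustrative only: the "butler" executable name, the "prune-collection"
# command spelling, the repo path, and the collection name are assumptions.
#
#     butler prune-collection /path/to/repo --collection my_run --purge --unstore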


@click.command(short_help="Search for collections.")
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "collections to return.")
@collection_type_option()
@click.option("--flatten-chains/--no-flatten-chains",
              help="Recursively get the child collections of matching CHAINED collections. Default is "
                   "--no-flatten-chains.")
@click.option("--include-chains/--no-include-chains",
              default=None,
              help="For --include-chains, return records for matching CHAINED collections. For "
                   "--no-include-chains do not return records for CHAINED collections. Default is the "
                   "opposite of --flatten-chains: include either CHAINED collections or their children, but "
                   "not both.")
@options_file_option()
def query_collections(*args, **kwargs):
    """Get the collections whose names match an expression."""
    print(yaml.dump(cli_handle_exception(script.queryCollections, *args, **kwargs)))
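
# Example invocation of the "query_collections" command defined above.
# Illustrative only: the "butler" executable name, the "query-collections"
# command spelling, the repo path, and the glob pattern are assumptions.
#
#     butler query-collections /path/to/repo "my_collection*" --flatten-chains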


@click.command()
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "dataset types to return.")
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@click.option("--components/--no-components",
              default=None,
              help="For --components, apply all expression patterns to component dataset type names as well. "
                   "For --no-components, never apply patterns to components. Default (where neither is "
                   "specified) is to apply patterns to components only if their parent datasets were not "
                   "matched by the expression. Fully-specified component datasets (`str` or `DatasetType` "
                   "instances) are always included.")
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    print(yaml.dump(cli_handle_exception(script.queryDatasetTypes, *args, **kwargs), sort_keys=False))
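
# Example invocation of the "query_dataset_types" command defined above.
# Illustrative only: the "butler" executable name, the "query-dataset-types"
# command spelling, the repo path, the glob pattern, and the --verbose flag
# spelling (defined by verbose_option in ..opt) are assumptions.
#
#     butler query-dataset-types /path/to/repo "calexp*" --verbose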


@click.command()
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=1)
def remove_dataset_type(*args, **kwargs):
    """Remove a dataset type definition from a repository."""
    cli_handle_exception(script.removeDatasetType, *args, **kwargs)
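
# Example invocation of the "remove_dataset_type" command defined above.
# Illustrative only: the "butler" executable name, the "remove-dataset-type"
# command spelling, the repo path, and the dataset type name are assumptions.
#
#     butler remove-dataset-type /path/to/repo my_dataset_type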


@click.command()
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "dataset types to be queried.")
@collections_option()
@where_option(help=whereHelp)
@click.option("--find-first",
              is_flag=True,
              help=unwrap("""For each result data ID, only yield one DatasetRef of each DatasetType, from the
                          first collection in which a dataset of that dataset type appears (according to the
                          order of 'collections' passed in). If used, 'collections' must specify at least one
                          expression and must not contain wildcards."""))
@click.option("--show-uri",
              is_flag=True,
              help="Show the dataset URI in results.")
@options_file_option()
def query_datasets(**kwargs):
    """List the datasets in a repository."""
    tables = cli_handle_exception(script.queryDatasets, **kwargs)

    for table in tables:
        print("")
        table.pprint_all()
        print("")
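
# Example invocation of the "query_datasets" command defined above.
# Illustrative only: the "butler" executable name, the "query-datasets"
# command spelling, the repo path, the dataset type glob, the collection name,
# and the --collections flag spelling (defined in ..opt) are assumptions.
#
#     butler query-datasets /path/to/repo raw --collections my_run --show-uri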


@click.command()
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option("--begin-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the beginning of the validity range for the
                          certified calibrations."""))
@click.option("--end-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the end of the validity range for the
                          certified calibrations."""))
@click.option("--search-all-inputs", is_flag=True, default=False,
              help=unwrap("""Search all children of the inputCollection if it is a CHAINED collection,
                          instead of just the most recent one."""))
def certify_calibrations(*args, **kwargs):
    """Certify calibrations in a repository."""
    cli_handle_exception(script.certifyCalibrations, *args, **kwargs)
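
# Example invocation of the "certify_calibrations" command defined above.
# Illustrative only: the "butler" executable name, the "certify-calibrations"
# command spelling, the repo path, the collection names, the dataset type, and
# the dates are assumptions.
#
#     butler certify-calibrations /path/to/repo my_input_collection my_calib_collection bias \
#         --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59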


@click.command()
@repo_argument(required=True)
@dimensions_argument(help=unwrap("""DIMENSIONS are the keys of the data IDs to yield, such as exposure,
                                 instrument, or tract. Will be expanded to include any dependencies."""))
@collections_option()
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded data IDs. For example, including "raw" here would
                             constrain the yielded "instrument", "exposure", "detector", and
                             "physical_filter" values to only those for which at least one "raw" dataset
                             exists in "collections"."""))
@where_option(help=whereHelp)
@options_file_option()
def query_data_ids(**kwargs):
    """List the data IDs in a repository."""
    table = cli_handle_exception(script.queryDataIds, **kwargs)
    if table:
        table.pprint_all()
    elif not kwargs.get("dimensions") and not kwargs.get("datasets"):
        print("No results. Try requesting some dimensions or datasets; see --help for more information.")
    else:
        print("No results. Try --help for more information.")
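
# Example invocation of the "query_data_ids" command defined above.
# Illustrative only: the "butler" executable name, the "query-data-ids"
# command spelling, the repo path, the collection name, and the --datasets and
# --collections flag spellings (defined in ..opt) are assumptions.
#
#     butler query-data-ids /path/to/repo exposure detector \
#         --datasets raw --collections my_run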