Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 55%

291 statements  

coverage.py v6.5.0, created at 2022-12-01 19:55 +0000

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ()

import click

from ..opt import (
    collection_type_option,
    collection_argument,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    options_file_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)

from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

from ... import script


willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs):
    """Add existing datasets to a tagged collection; searches for datasets
    matching the query options and associates them with the named COLLECTION.
    """
    script.associate(**kwargs)
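
# A hypothetical invocation (the repository path, collection names, and query
# values are illustrative only):
#
#     butler associate my/repo tagged-calibs --collections HSC/runs/run1 \
#         --datasets calexp --where "instrument = 'HSC'"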


# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a
# better mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option("--export-file",
              help="Name for the file that contains database information associated with the exported "
                   "datasets. If this is not an absolute path, does not exist in the current working "
                   "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
                   "to \"export.yaml\".",
              type=click.File("r"))
@click.option("--skip-dimensions", "-s", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Dimensions that should be skipped during import.")
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)
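
# Example (paths and values are illustrative; "symlink" stands in for
# whichever mode the shared --transfer option accepts in your setup):
#
#     butler import my/repo /staging/exported-data --export-file export.yaml \
#         --transfer symlink --skip-dimensions instrument,detector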


@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option("--standalone", is_flag=True, help="Include all defaults in the config file in the repo, "
              "insulating the repo from changes in package defaults.")
@click.option("--override", is_flag=True, help="Allow values in the supplied config to override all "
              "repo settings.")
@click.option("--outfile", "-f", default=None, type=str, help="Name of output file to receive repository "
              "configuration. Default is to write butler.yaml into the specified repo.")
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)


@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--subset", "-s", type=str,
              help="Subset of a configuration to report. This can be any key in the hierarchy such as "
                   "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.")
@click.option("--searchpath", "-p", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Additional search paths to use for configuration overrides.")
@click.option("--file", "outfile", type=click.File("w"), default="-",
              help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
                   "by default.")
@options_file_option()
def config_dump(*args, **kwargs):
    """Dump either a subset or full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
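
# Example (hypothetical repository path): dump only the datastore root from
# the repository configuration, with the leading '.' setting the hierarchy
# delimiter as described in the --subset help above:
#
#     butler config-dump my/repo --subset .datastore.root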


@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option("--ignore", "-i", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="DatasetType(s) to ignore for validation.")
@options_file_option()
def config_validate(*args, **kwargs):
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(help=unwrap("""COLLECTION is the name of the collection to remove. If this is a tagged
                                 or chained collection, datasets within the collection are not modified
                                 unless --unstore is passed. If this is a run collection, --purge and
                                 --unstore must be passed, and all datasets in it are fully removed from the
                                 data repository."""))
@click.option("--purge",
              help=unwrap("""Permit RUN collections to be removed, fully removing datasets within them.
                          Requires --unstore as an added precaution against accidental deletion. Must not be
                          passed if the collection is not a RUN."""),
              is_flag=True)
@click.option("--unstore",
              help="Remove all datasets in the collection from all datastores in which they appear.",
              is_flag=True)
@click.option("--unlink",
              help="Before removing the given collection, unlink it from this parent collection.",
              multiple=True,
              callback=split_commas)
@confirm_option()
@options_file_option()
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    result = script.pruneCollection(**kwargs)
    if result.confirm:
        print("The following collections will be removed:")
        result.removeTable.pprint_all(align="<")
        doContinue = click.confirm("Continue?", default=False)
    else:
        doContinue = True
    if doContinue:
        result.onConfirmation()
        print("Removed collections.")
    else:
        print("Aborted.")


pruneDatasets_wouldRemoveMsg = unwrap("""The following datasets will be removed from any datastores in which
                                      they are present:""")
pruneDatasets_wouldDisassociateMsg = unwrap("""The following datasets will be disassociated from {collections}
                                            if they are currently present in it (which is not checked):""")
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap("""The following datasets will be disassociated from
                                                     {collections} if they are currently present in it (which
                                                     is not checked), and removed from any datastores in which
                                                     they are present.""")
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collection'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection.""")
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate", "disassociate_tags",
    help=unwrap("""Disassociate pruned datasets from the given tagged collections. May not be used with
                --purge."""),
    multiple=True,
    callback=split_commas,
    metavar="TAG"
)


purge_option = MWOptionDecorator(
    "--purge", "purge_run",
    help=unwrap("""Completely remove the dataset from the given RUN in the Registry. May not be used with
                --disassociate. Note, this may remove provenance information from datasets other than those
                provided, and should be used with extreme care."""),
    metavar="RUN"
)


find_all_option = MWOptionDecorator(
    "--find-all", is_flag=True,
    help=unwrap("""Purge the dataset results from all of the collections in which a dataset of that dataset
                type + data id combination appears. (By default only the first found dataset type + data id
                is purged, according to the order of COLLECTIONS passed in).""")
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap("""Remove these datasets from all datastores configured with this data repository. If
                --disassociate and --purge are not used then --unstore will be used by default. Note that
                --unstore will make it impossible to retrieve these datasets even via other collections.
                Datasets that are already not stored are ignored by this option.""")
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap("""Display the datasets that would be removed but do not remove them.

                Note that a dataset can be in collections other than its RUN-type collection, and removing it
                will remove it from all of them, even though the only one this will show is its RUN
                collection.""")
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed.""")
)


@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(help=unwrap("""COLLECTIONS is one or more expressions that identify the collections to
                                  search for datasets. Glob-style expressions may be used but only if the
                                  --find-all flag is also passed."""))
@option_section("Query Datasets Options:")
@datasets_option(help="One or more glob-style expressions that identify the dataset types to be pruned.",
                 multiple=True,
                 callback=split_commas)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs):
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(pruneDatasets_errNoOp)
    if result.dryRun:
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
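
# A hypothetical dry run (the repository path, collection, and dataset-type
# names are illustrative only): show what would be disassociated from a
# tagged collection without removing anything:
#
#     butler prune-datasets my/repo my-tagged-collection --datasets calexp \
#         --disassociate my-tagged-collection --dry-run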


@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "collections to return.")
@collection_type_option()
@click.option("--chains",
              default="table",
              help=unwrap("""Affects how results are presented. TABLE lists each collection in a row with
                          chained collections' children listed in a Definition column. TREE lists children
                          below their parent in tree form. FLATTEN lists all collections, including child
                          collections, in one list. Defaults to TABLE."""),
              callback=to_upper,
              type=click.Choice(("TABLE", "TREE", "FLATTEN"), case_sensitive=False))
@options_file_option()
def query_collections(*args, **kwargs):
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
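
# Example (the repository path and glob are illustrative): list matching
# collections with chained collections rendered as a tree:
#
#     butler query-collections my/repo "HSC/*" --chains tree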


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "dataset types to return.")
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('dataset-type-name', nargs=1)
def remove_dataset_type(*args, **kwargs):
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs):
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('input-collection')
@click.argument('output-collection')
@click.argument('dataset-type-name')
@click.option("--begin-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the beginning of the validity range for the
                          certified calibrations."""))
@click.option("--end-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the end of the validity range for the
                          certified calibrations."""))
@click.option("--search-all-inputs", is_flag=True, default=False,
              help=unwrap("""Search all children of the inputCollection if it is a CHAINED collection,
                          instead of just the most recent one."""))
@options_file_option()
def certify_calibrations(*args, **kwargs):
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
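
# Example (all names and dates are illustrative): certify bias calibrations
# from an input collection into an output collection, with the validity range
# given as TAI ISO-8601 datetimes:
#
#     butler certify-calibrations my/repo u/user/bias-run HSC/calib bias \
#         --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59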


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(help=unwrap("""DIMENSIONS are the keys of the data IDs to yield, such as exposure,
                                 instrument, or tract. Will be expanded to include any dependencies."""))
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded data IDs. For example, including "raw" here would
                             constrain the yielded "instrument", "exposure", "detector", and
                             "physical_filter" values to only those for which at least one "raw" dataset
                             exists in "collections". Requires --collections."""))
@where_option(help=where_help)
@options_file_option()
def query_data_ids(**kwargs):
    """List the data IDs in a repository."""
    table = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets, see --help for more information.")
        else:
            print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded records. May only be used with
                             --collections."""))
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@click.option("--no-check", is_flag=True,
              help=unwrap("""Don't check the query before execution. By default the query is checked before
                          it is executed; this may reject some valid queries that resemble common
                          mistakes."""))
@options_file_option()
def query_dimension_records(**kwargs):
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option("--preserve-path/--no-preserve-path", is_flag=True, default=True,
              help="Preserve the datastore path to the artifact at the destination.")
@click.option("--clobber/--no-clobber", is_flag=True, default=False,
              help="If clobber, overwrite files if they exist locally.")
@options_file_option()
def retrieve_artifacts(**kwargs):
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")


@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@options_file_option()
def transfer_datasets(**kwargs):
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets to
    transfer.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option("--doc", default="",
              help="Documentation string associated with this collection. "
                   "Only relevant if the collection is newly created.")
@click.option("--flatten/--no-flatten", default=False,
              help="If `True` recursively flatten out any nested chained collections in children first.")
@click.option("--mode",
              type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
              default="redefine",
              help="Update mode: "
                   "'redefine': Create new chain or redefine existing chain with the supplied CHILDREN. "
                   "'remove': Modify existing chain to remove the supplied CHILDREN. "
                   "'pop': Pop a numbered element off the chain. Defaults to popping "
                   "the first element (0). ``children`` must be integers if given. "
                   "'prepend': Modify existing chain to prepend the supplied CHILDREN to the front. "
                   "'extend': Modify existing chain to extend it with the supplied CHILDREN.")
def collection_chain(**kwargs):
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification:

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option("--formatter", type=str,
              help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
                   " will be determined from the dataset type and datastore configuration.")
@click.option("--id-generation-mode",
              default="UNIQUE",
              help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
                   " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
                   " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
                   " The latter is usually used for 'raw'-type data that will be ingested into multiple"
                   " repositories.",
              callback=to_upper,
              type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False))
@click.option("--data-id",
              type=str,
              multiple=True, callback=split_commas,
              help="Keyword=value string with an additional dataId value that is fixed for all ingested"
                   " files. This can be used to simplify the table file by removing repeated entries that"
                   " are fixed for all files to be ingested. Multiple key/values can be given either by"
                   " using comma separation or multiple command line options.")
@click.option("--prefix",
              type=str,
              help="For relative paths in the table file, specify a prefix to use. The default is to"
                   " use the current working directory.")
@transfer_option()
def ingest_files(**kwargs):
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. Relative file URIs are assumed to be relative to the current
    working directory by default, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records and so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
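
# A hypothetical table file (CSV readable by astropy.table; the column names
# are illustrative and must match the dimensions of the dataset type):
#
#     file,exposure,detector
#     data/image_001.fits,1001,22
#     data/image_002.fits,1001,23
#
# and a matching illustrative invocation, fixing the instrument for all rows
# via --data-id and anchoring relative paths with --prefix:
#
#     butler ingest-files my/repo my_dataset_type my-run files.csv \
#         --data-id instrument=HSC --prefix /data/staging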


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option("--is-calibration/--no-is-calibration", is_flag=True, default=False,
              help="Indicate that this dataset type can be part of a calibration collection.")
def register_dataset_type(**kwargs):
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so cannot be defined by this command. Component
    dataset types are derived automatically from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True)
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
def export_calibs(*args, **kwargs):
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")