Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 56%

290 statements  

coverage.py v7.2.7, created at 2023-07-12 10:56 -0700

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ()

import warnings
from typing import Any

import click

from ... import script
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_dimensions_option,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."

@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs: Any) -> None:
    """Add existing datasets to a tagged collection; searches for datasets
    matching the given query options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)
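
# A usage sketch for the command above (the repository path and collection
# names are hypothetical; the --collections dataset-query option is assumed
# to be supplied by query_datasets_options):
#
#   $ butler associate /repo/main u/user/tagged --collections HSC/runs/test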


# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a
# better mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=str,
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import.",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args: Any, **kwargs: Any) -> None:
    """Import data into a butler repository."""
    # `reuse_ids` is not used by `butlerImport`.
    reuse_ids = kwargs.pop("reuse_ids", False)
    if reuse_ids:
        warnings.warn("--reuse-ids option is deprecated and will be removed after v26.", FutureWarning)

    script.butlerImport(*args, **kwargs)
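
# A usage sketch (paths are hypothetical; "copy" is one of the standard
# butler transfer modes accepted by --transfer):
#
#   $ butler import /repo/main /path/to/export/dir --export-file export.yaml --transfer copy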

@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args: Any, **kwargs: Any) -> None:
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)
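
# A usage sketch (the repository path is hypothetical; --standalone is the
# flag defined above):
#
#   $ butler create /repo/new --standalone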

@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides.",
)
@click.option(
    "--file",
    "outfile",
    type=click.File(mode="w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout by default.",
)
@options_file_option()
def config_dump(*args: Any, **kwargs: Any) -> None:
    """Dump either a subset or full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
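
# A usage sketch (the repository path is hypothetical; the --subset key is the
# example given in the help text above):
#
#   $ butler config-dump /repo/main --subset .datastore.root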

@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args: Any, **kwargs: Any) -> None:
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)

pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Cannot use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Cannot prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Implies --unstore. Note that this may remove provenance information from
        datasets other than those provided, and should be used with extreme care.
        RUN has to be provided for backward compatibility, but is used only if COLLECTIONS is
        not provided. Otherwise, datasets will be removed from
        any RUN-type collections in COLLECTIONS."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appears. (By default only the first found dataset type + data id
        is purged, according to the order of COLLECTIONS passed in.)"""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires that --dry-run not be passed."""),
)

@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs: Any) -> None:
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(message=pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(message=pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(message=pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(message=pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(message=pruneDatasets_errNoOp)
    if result.dryRun:
        assert result.action is not None, "Dry run results have not been set up properly."
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(text=pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            if result.onConfirmation:
                result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
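
# A usage sketch (the repository path and collection name are hypothetical;
# --purge implies --unstore, as documented above):
#
#   $ butler prune-datasets /repo/main u/user/old-run --purge u/user/old-run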

@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="TREE",
    help="""Affects how results are presented:

    TABLE lists each collection in table form, with columns for collection
    name and type, and a column that lists the children of CHAINED
    collections (if any CHAINED collections are found).

    INVERSE-TABLE is like TABLE but instead of a column listing CHAINED
    collection children, it lists the parents of the collection if it is
    contained in any CHAINED collections.

    TREE recursively lists children below each CHAINED collection in tree
    form.

    INVERSE-TREE recursively lists parent collections below each collection
    in tree form.

    FLATTEN lists all collections, including children, in one list.

    [default: TREE]""",
    # Above, the default value is included in the help text, instead of using
    # show_default, so that the default is printed on its own line instead of
    # coming right after the FLATTEN text.
    callback=to_upper,
    type=click.Choice(
        choices=("TABLE", "INVERSE-TABLE", "TREE", "INVERSE-TREE", "FLATTEN"),
        case_sensitive=False,
    ),
)
@options_file_option()
def query_collections(*args: Any, **kwargs: Any) -> None:
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a
    # table, so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
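
# A usage sketch (the repository path and glob expression are hypothetical):
#
#   $ butler query-collections /repo/main "HSC/*" --chains TABLE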

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args: Any, **kwargs: Any) -> None:
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=-1)
def remove_dataset_type(*args: Any, **kwargs: Any) -> None:
    """Remove the dataset type definitions from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs: Any) -> None:
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")
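
# A usage sketch for query-datasets (the repository path, dataset type glob,
# and collection name are hypothetical; the GLOB argument and --collections
# option are assumed to be supplied by query_datasets_options):
#
#   $ butler query-datasets /repo/main raw --collections HSC/raw/all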

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of the inputCollection if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args: Any, **kwargs: Any) -> None:
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
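
# A usage sketch (the repository path, collection names, dataset type, and
# dates are hypothetical):
#
#   $ butler certify-calibrations /repo/main u/user/bias-run HSC/calib bias \
#       --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59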

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs: Any) -> None:
    """List the data IDs in a repository."""
    table, reason = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if reason:
            print(reason)
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets; see --help for more information.")
        else:
            print("No results. Try --help for more information.")
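
# A usage sketch (the repository path, dataset type, and collection name are
# hypothetical):
#
#   $ butler query-data-ids /repo/main exposure --datasets raw --collections HSC/raw/all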

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before it
        is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs: Any) -> None:
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")
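
# A usage sketch (the repository path and the instrument value in the --where
# expression are hypothetical):
#
#   $ butler query-dimension-records /repo/main exposure --where "instrument = 'HSC'"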

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="Overwrite files if they already exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs: Any) -> None:
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")
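
# A usage sketch (the paths are hypothetical; "copy" is one of the standard
# butler transfer modes, and --no-preserve-path is the flag defined above):
#
#   $ butler retrieve-artifacts /repo/main /tmp/artifacts --transfer copy --no-preserve-path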

@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@transfer_dimensions_option()
@options_file_option()
def transfer_datasets(**kwargs: Any) -> None:
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets to
    transfer.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")
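
# A usage sketch (the repository URIs and collection name are hypothetical;
# --collections and --register-dataset-types are assumed to be the option
# names supplied by the decorators above):
#
#   $ butler transfer-datasets /repo/source /repo/dest --collections u/user/run --register-dataset-types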

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If `True` recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
    "'remove': Modify the existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify the existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs: Any) -> None:
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain. MODE 'pop' can take
    negative integers to indicate removal relative to the end of the chain,
    but in that case '--' must be given to indicate the end of the options
    specification:

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
    " The latter is usually used for 'raw'-type data that will be ingested into multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs: Any) -> None:
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. Relative file URIs are assumed to be relative to the current
    working directory by default, but this can be overridden with the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
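
# A usage sketch (the file names, dataset type, run, and dimension columns
# are hypothetical). Given a table file photometry.ecsv whose columns are
# "file, instrument, exposure, detector", relative paths in the "file" column
# can be anchored with --prefix:
#
#   $ butler ingest-files /repo/main my_dataset_type u/user/run photometry.ecsv --prefix /data/staging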

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs: Any) -> None:
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so cannot be defined by this command. Component
    dataset types are automatically derived from the composite dataset
    type when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
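
# A usage sketch (the dataset type name and dimensions are hypothetical;
# StructuredDataDict is used as an example storage class name):
#
#   $ butler register-dataset-type /repo/main my_summary StructuredDataDict instrument visit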

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True, help="DIRECTORY is the folder to receive the exported calibrations.")
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
@dataset_type_option(help="Specific DatasetType(s) to export.", multiple=True)
@transfer_option()
def export_calibs(*args: Any, **kwargs: Any) -> None:
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")
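
# A usage sketch (the paths, collection, and dataset type are hypothetical;
# --dataset-type is assumed to be the option name supplied by
# dataset_type_option):
#
#   $ butler export-calibs /repo/main /tmp/calib-export HSC/calib --dataset-type bias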