Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 77% (289 statements)


# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ()

from typing import Any

import click

from ... import script
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_dimensions_option,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs: Any) -> None:
    """Add existing datasets to a tagged collection; searches for datasets with
    the options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)


# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a better
# mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=str,
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import.",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args: Any, **kwargs: Any) -> None:
    """Import data into a butler repository."""
    # ``reuse_ids`` is not used by `butlerImport`.
    reuse_ids = kwargs.pop("reuse_ids", False)
    if reuse_ids:
        click.echo("WARNING: --reuse-ids option is deprecated and will be removed after v26.")

    script.butlerImport(*args, **kwargs)
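
# Illustrative invocation (a sketch; paths are hypothetical, and the valid
# transfer modes come from the shared ``transfer_option`` defined elsewhere):
#
#     butler import /my/repo /path/to/exportdir --export-file export.yaml
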

@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args: Any, **kwargs: Any) -> None:
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)
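
# Illustrative invocation (repo path is hypothetical), creating a repository
# whose config is insulated from package-default changes:
#
#     butler create /my/repo --standalone
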

@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides.",
)
@click.option(
    "--file",
    "outfile",
    type=click.File(mode="w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout by default.",
)
@options_file_option()
def config_dump(*args: Any, **kwargs: Any) -> None:
    """Dump either a subset or the full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
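
# Illustrative invocation (repo path is hypothetical), dumping just the
# datastore section of the configuration:
#
#     butler config-dump /my/repo --subset .datastore
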

@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args: Any, **kwargs: Any) -> None:
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)


pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Implies --unstore. Note that this may remove provenance information from
        datasets other than those provided, and should be used with extreme care.
        RUN has to be provided for backward compatibility, but is used only if COLLECTIONS is
        not provided. Otherwise, datasets will be removed from
        any RUN-type collections in COLLECTIONS."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appear. (By default only the first found dataset type + data id is
        purged, according to the order of COLLECTIONS passed in.)"""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires that --dry-run not be passed."""),
)


@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs: Any) -> None:
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(message=pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(message=pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(message=pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(message=pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(message=pruneDatasets_errNoOp)
    if result.dryRun:
        assert result.action is not None, "Dry run results have not been set up properly."
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(text=pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            if result.onConfirmation:
                result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
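
# Illustrative invocation (a sketch; repository path and run name are
# hypothetical). Per the --purge help above, --purge implies --unstore:
#
#     butler prune-datasets /my/repo --purge my_run
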

@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="TREE",
    help="""Affects how results are presented:

    TABLE lists each collection in table form, with columns for collection name
    and type, and a column that lists children of CHAINED collections (if any
    CHAINED collections are found).

    INVERSE-TABLE is like TABLE but instead of a column listing CHAINED
    collection children, it lists the parents of the collection if it is
    contained in any CHAINED collections.

    TREE recursively lists children below each CHAINED collection in tree form.

    INVERSE-TREE recursively lists parent collections below each collection in
    tree form.

    FLATTEN lists all collections, including child collections, in one list.

    [default: TREE]""",
    # Above, the default value is included in the help text, instead of using
    # show_default, so that the default is printed on its own line instead of
    # coming right after the FLATTEN text.
    callback=to_upper,
    type=click.Choice(
        choices=("TABLE", "INVERSE-TABLE", "TREE", "INVERSE-TREE", "FLATTEN"),
        case_sensitive=False,
    ),
)
@options_file_option()
def query_collections(*args: Any, **kwargs: Any) -> None:
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table,
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
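
# Illustrative invocation (the glob pattern is hypothetical), listing matching
# collections with chained collections shown as a tree:
#
#     butler query-collections /my/repo 'HSC/*' --chains TREE
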

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args: Any, **kwargs: Any) -> None:
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=-1)
def remove_dataset_type(*args: Any, **kwargs: Any) -> None:
    """Remove the dataset type definitions from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs: Any) -> None:
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of the inputCollection if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args: Any, **kwargs: Any) -> None:
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
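
# Illustrative invocation (collection and dataset type names are hypothetical),
# certifying flats over a one-year validity range:
#
#     butler certify-calibrations /my/repo my_input_run my_calib_collection flat \
#         --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59
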

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs: Any) -> None:
    """List the data IDs in a repository."""
    table, reason = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if reason:
            print(reason)
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets; see --help for more information.")
        else:
            print("No results. Try --help for more information.")
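
# Illustrative invocation (a sketch; values are hypothetical, and the dimension
# names follow the example given in the --datasets help above):
#
#     butler query-data-ids /my/repo exposure detector --datasets raw --collections '*'
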

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with
        --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before it
        is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs: Any) -> None:
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="If clobber, overwrite files if they exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs: Any) -> None:
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")
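
# Illustrative invocation (paths are hypothetical; dataset selection options
# come from the shared ``query_datasets_options``):
#
#     butler retrieve-artifacts /my/repo /tmp/artifacts --no-preserve-path
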

@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@transfer_dimensions_option()
@options_file_option()
def transfer_datasets(**kwargs: Any) -> None:
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the RUN dataset.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")
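
# Illustrative invocation (repository URIs are hypothetical; dataset selection
# options come from the shared ``query_datasets_options``):
#
#     butler transfer-datasets /my/src-repo /my/dest-repo
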

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If `True`, recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
    "'remove': Modify the existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify the existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs: Any) -> None:
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on commas. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
    " The latter is usually used for 'raw'-type data that will be ingested into multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs: Any) -> None:
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI of the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. A relative file URI is by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
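
# Illustrative table file for a dataset type with "exposure" and "detector"
# dimensions (hypothetical names and values; any format readable by
# astropy.table works, CSV shown here):
#
#     file,exposure,detector
#     data/image_0001.fits,1001,22
#     data/image_0002.fits,1002,22
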

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs: Any) -> None:
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so can not be defined by this command. Component
    dataset types are automatically derived from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
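
# Illustrative invocation (dataset type name, storage class, and dimensions are
# hypothetical and must match the repository's dimension universe):
#
#     butler register-dataset-type /my/repo my_flat ExposureF instrument detector physical_filter
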

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True, help="DIRECTORY is the folder to receive the exported calibrations.")
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
@dataset_type_option(help="Specific DatasetType(s) to export.", multiple=True)
@transfer_option()
def export_calibs(*args: Any, **kwargs: Any) -> None:
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")