Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 56% (286 statements)

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ()

from typing import Any

import click

from ... import script
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_dimensions_option,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs: Any) -> None:
    """Add existing datasets to a tagged collection; searches for datasets
    matching the given query options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)
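# For illustration, a hypothetical invocation of the command above (the
# collection and dataset type names are made up; the --collections and
# --datasets flags are assumed from query_datasets_options): tag the "calexp"
# datasets found in RUN collection "HSC/runs/run1" into "u/user/good":
#
#   $ butler associate REPO u/user/good --collections HSC/runs/run1 --datasets calexp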

# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a
# better mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=str,
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import.",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args: Any, **kwargs: Any) -> None:
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)

@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args: Any, **kwargs: Any) -> None:
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)

@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides.",
)
@click.option(
    "--file",
    "outfile",
    type=click.File(mode="w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout by default.",
)
@options_file_option()
def config_dump(*args: Any, **kwargs: Any) -> None:
    """Dump either a subset or the full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
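# For illustration, dumping just one configuration section using the subset
# key format described in the --subset help text above:
#
#   $ butler config-dump REPO --subset .datastore.root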

@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args: Any, **kwargs: Any) -> None:
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)

pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Implies --unstore. Note that this may remove provenance information from
        datasets other than those provided, and should be used with extreme care.
        RUN has to be provided for backward compatibility, but is used only if COLLECTIONS is
        not provided. Otherwise, datasets will be removed from
        any RUN-type collections in COLLECTIONS."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appears. (By default only the first found dataset type + data id
        is purged, according to the order of COLLECTIONS passed in.)"""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed."""),
)

@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs: Any) -> None:
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(message=pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(message=pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(message=pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(message=pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(message=pruneDatasets_errNoOp)
    if result.dryRun:
        assert result.action is not None, "Dry run results have not been set up properly."
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(text=pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            if result.onConfirmation:
                result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
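# For illustration, a hypothetical invocation of prune-datasets (the
# collection name is made up): completely remove, after confirmation,
# everything in one RUN collection:
#
#   $ butler prune-datasets REPO u/user/run1 --purge u/user/run1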

@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="TREE",
    help="""Affects how results are presented:

    TABLE lists each collection in table form, with columns for collection
    name and type, and a column that lists the children of CHAINED collections
    (if any CHAINED collections are found).

    INVERSE-TABLE is like TABLE but instead of a column listing CHAINED
    collection children, it lists the parents of the collection if it is
    contained in any CHAINED collections.

    TREE recursively lists children below each CHAINED collection in tree
    form.

    INVERSE-TREE recursively lists parent collections below each collection in
    tree form.

    FLATTEN lists all collections, including child collections, in one list.

    [default: TREE]""",
    # Above, the default value is included in the help text, instead of using
    # show_default, so that the default is printed on its own line instead of
    # coming right after the FLATTEN text.
    callback=to_upper,
    type=click.Choice(
        choices=("TABLE", "INVERSE-TABLE", "TREE", "INVERSE-TREE", "FLATTEN"),
        case_sensitive=False,
    ),
)
@options_file_option()
def query_collections(*args: Any, **kwargs: Any) -> None:
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
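# For illustration, a hypothetical query (the glob is made up) that shows
# chained-collection membership as a table instead of the default tree:
#
#   $ butler query-collections REPO "HSC/*" --chains TABLE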

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args: Any, **kwargs: Any) -> None:
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=-1)
def remove_dataset_type(*args: Any, **kwargs: Any) -> None:
    """Remove the dataset type definitions from a repository."""
    script.removeDatasetType(*args, **kwargs)

@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs: Any) -> None:
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of the inputCollection if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args: Any, **kwargs: Any) -> None:
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
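# For illustration, a hypothetical invocation (the collection and dataset type
# names are made up; the dates are ISO-8601 TAI, as described in the option
# help above):
#
#   $ butler certify-calibrations REPO u/user/bias-run HSC/calib bias \
#       --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59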

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs: Any) -> None:
    """List the data IDs in a repository."""
    table, reason = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if reason:
            print(reason)
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets; see --help for more information.")
        else:
            print("No results. Try --help for more information.")
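# For illustration, a hypothetical query (the collection name and where clause
# are made up; "exposure" and "detector" are dimensions named in the docstring
# above): list exposure/detector data IDs constrained by existing "raw"
# datasets:
#
#   $ butler query-data-ids REPO exposure detector \
#       --collections HSC/raw/all --datasets raw --where "instrument = 'HSC'"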

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with
        --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before
        it is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs: Any) -> None:
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="If clobber, overwrite files if they exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs: Any) -> None:
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")

@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@transfer_dimensions_option()
@options_file_option()
def transfer_datasets(**kwargs: Any) -> None:
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the RUN dataset.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")
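# For illustration, a hypothetical transfer (the repository paths, collection,
# and dataset type names are made up; flag spellings are assumed from the
# option decorators applied above):
#
#   $ butler transfer-datasets /repo/main /repo/scratch \
#       --collections HSC/runs/run1 --datasets calexp --register-dataset-types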

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If `True` recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
    "'remove': Modify the existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify the existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs: Any) -> None:
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
    " The latter is usually used for 'raw'-type data that will be ingested into multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs: Any) -> None:
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. A relative file URI is by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records and so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
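# For illustration, a hypothetical CSV table file for a dataset type with
# "instrument" and "detector" dimensions (the file names, dataset type, run,
# and dimension values are all made up, and the exact URI column name depends
# on the script.ingest_files implementation):
#
#   file,instrument,detector
#   file:///data/chunk1.fits,LSSTCam,10
#   file:///data/chunk2.fits,LSSTCam,11
#
# which might be ingested with something like:
#
#   $ butler ingest-files REPO my_dataset_type u/user/ingest-run table.csv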

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs: Any) -> None:
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so can not be defined by this command. Component
    dataset types are automatically derived from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
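# For illustration, a hypothetical registration of an exposure-level image
# dataset type (the dataset type name is made up; valid storage classes and
# dimensions depend on the repository configuration):
#
#   $ butler register-dataset-type REPO my_image ExposureF instrument exposure detector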

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True, help="DIRECTORY is the folder to receive the exported calibrations.")
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
@dataset_type_option(help="Specific DatasetType(s) to export.", multiple=True)
@transfer_option()
def export_calibs(*args: Any, **kwargs: Any) -> None:
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")