Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 56%


297 statements  

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ()

import click

from ... import script
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)


willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs):
    """Add existing datasets to a tagged collection; searches for datasets
    matching the given query options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)
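
# Illustrative invocation; the repo path and collection/dataset-type names are
# hypothetical, and --collections/--datasets are assumed to be among the query
# options contributed by query_datasets_options:
#
#   butler associate /repo/main tagged/good-seeing --collections HSC/runs/w_2021_30 --datasets calexp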



# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a better
# mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and DIRECTORY is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=click.File("r"),
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import.",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)
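
# Illustrative invocation (paths are hypothetical); DIRECTORY points at a
# previously exported data tree and the transfer mode is one example value:
#
#   butler import /repo/main /exports/subset --export-file export.yaml --transfer symlink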



@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)
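
# Illustrative invocation (paths and file names are hypothetical):
#
#   butler create /repo/scratch --seed-config my-butler.yaml --standalone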



@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides.",
)
@click.option(
    "--file",
    "outfile",
    type=click.File("w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
    "by default.",
)
@options_file_option()
def config_dump(*args, **kwargs):
    """Dump either a subset or full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
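
# Illustrative invocation, using the dotted --subset syntax described in the
# option help above (the repo path is hypothetical):
#
#   butler config-dump /repo/main --subset .datastore.root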



@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args, **kwargs):
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(
    help=unwrap(
        """COLLECTION is the name of the collection to remove. If this is a tagged or
        chained collection, datasets within the collection are not modified unless --unstore
        is passed. If this is a run collection, --purge and --unstore must be passed, and
        all datasets in it are fully removed from the data repository."""
    )
)
@click.option(
    "--purge",
    help=unwrap(
        """Permit RUN collections to be removed, fully removing datasets within them.
        Requires --unstore as an added precaution against accidental deletion. Must not be
        passed if the collection is not a RUN."""
    ),
    is_flag=True,
)
@click.option(
    "--unstore",
    help="Remove all datasets in the collection from all datastores in which they appear.",
    is_flag=True,
)
@click.option(
    "--unlink",
    help="Before removing the given collection, unlink it from the given parent collection.",
    multiple=True,
    callback=split_commas,
)
@confirm_option()
@options_file_option()
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    result = script.pruneCollection(**kwargs)
    if result.confirm:
        print("The following collections will be removed:")
        result.removeTable.pprint_all(align="<")
        doContinue = click.confirm("Continue?", default=False)
    else:
        doContinue = True
    if doContinue:
        result.onConfirmation()
        print("Removed collections.")
    else:
        print("Aborted.")



pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed
    TAGged collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collection'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."


disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Note, this may remove provenance information from datasets other than those
        provided, and should be used with extreme care."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appears. (By default only the first found dataset type + data id is
        purged, according to the order of COLLECTIONS passed in.)"""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed."""),
)



@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs):
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(pruneDatasets_errNoOp)
    if result.dryRun:
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
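
# Illustrative invocation (names are hypothetical): fully remove the "calexp"
# datasets in one RUN collection from the registry and datastores:
#
#   butler prune-datasets /repo/main u/user/run1 --purge u/user/run1 --datasets calexp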



@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="table",
    help=unwrap(
        """Affects how results are presented. TABLE lists each collection in a row, with
        chained collections' children listed in a Definition column. TREE lists children below
        their parent in tree form. FLATTEN lists all collections, including children, in
        one list. Defaults to TABLE."""
    ),
    callback=to_upper,
    type=click.Choice(("TABLE", "TREE", "FLATTEN"), case_sensitive=False),
)
@options_file_option()
def query_collections(*args, **kwargs):
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
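
# Illustrative invocation (the glob and repo path are hypothetical): show all
# HSC collections, with CHAINED collections rendered as a tree:
#
#   butler query-collections /repo/main "HSC/*" --chains TREE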



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=1)
def remove_dataset_type(*args, **kwargs):
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs):
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of the INPUT_COLLECTION if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args, **kwargs):
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
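
# Illustrative invocation (collection and dataset type names are hypothetical);
# the date is ISO-8601 TAI as described in the option help, and --end-date may
# be supplied analogously:
#
#   butler certify-calibrations /repo/main u/user/flats HSC/calib flat --begin-date 2021-01-01T00:00:00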



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs):
    """List the data IDs in a repository."""
    table = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets, see --help for more information.")
        else:
            print("No results. Try --help for more information.")



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with
        --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before it
        is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs):
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="If clobber, overwrite files if they exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs):
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")



@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@options_file_option()
def transfer_datasets(**kwargs):
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If `True` recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create new chain or redefine existing chain with the supplied CHILDREN. "
    "'remove': Modify existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs):
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataID and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataID, dataset type and run."
    " The latter is usually used for 'raw'-type data that will be ingested into multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs):
    """Ingest files from table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. Relative file URIs are assumed by default to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records and so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
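
# Illustrative table file (CSV, readable by astropy.table) for a hypothetical
# dataset type keyed by exposure+detector, with the fixed instrument value
# supplied on the command line via --data-id:
#
#   file,exposure,detector
#   file:///data/one.fits,1234,10
#   file:///data/two.fits,1234,11
#
#   butler ingest-files --data-id instrument=HSC /repo/main myDatasetType u/user/run1 table.csv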



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs):
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so cannot be defined by this command. Component
    dataset types are automatically derived from the composite dataset
    type when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")



@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True)
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
def export_calibs(*args, **kwargs):
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")