# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ()

from collections.abc import Callable
from typing import Any, cast

import click
from deprecated.sphinx import deprecated

from ... import script
from .. import utils as cmd_utils
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_dimensions_option,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

# Cast the callback signatures to appease mypy, since mypy thinks they
# are too constrained.
split_commas = cast(
    Callable[[click.Context, click.Option | click.Parameter, Any], Any], cmd_utils.split_commas
)
to_upper = cast(Callable[[click.Context, click.Option | click.Parameter, Any], Any], cmd_utils.to_upper)

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs: Any) -> None:
    """Add existing datasets to a tagged collection; search for datasets
    matching the given options and add them to the named COLLECTION.
    """
    script.associate(**kwargs)
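
# A hypothetical invocation as a sketch; the repo path, collection, and
# dataset type names are illustrative only, and the --collections/--datasets
# option names are assumed to come from query_datasets_options:
#
#   butler associate /path/to/repo my_tagged_collection \
#       --collections my_run --datasets calexp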


# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a
# better mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=click.File(mode="r"),
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import.",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args: Any, **kwargs: Any) -> None:
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)
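
# A hypothetical invocation as a sketch; the paths are illustrative only:
#
#   butler import /path/to/repo /path/to/exported/data \
#       --transfer symlink --export-file export.yaml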


@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args: Any, **kwargs: Any) -> None:
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)
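
# A hypothetical invocation as a sketch; the paths are illustrative only:
#
#   butler create /path/to/new/repo --seed-config my_seed.yaml --standalone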


@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides.",
)
@click.option(
    "--file",
    "outfile",
    type=click.File(mode="w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
    "by default.",
)
@options_file_option()
def config_dump(*args: Any, **kwargs: Any) -> None:
    """Dump either a subset or the full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
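
# A hypothetical invocation as a sketch; the repo path is illustrative only:
#
#   butler config-dump /path/to/repo --subset .datastore.root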


@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args: Any, **kwargs: Any) -> None:
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(
    help=unwrap(
        """COLLECTION is the name of the collection to remove. If this is a tagged or
        chained collection, datasets within the collection are not modified unless --unstore
        is passed. If this is a run collection, --purge and --unstore must be passed, and
        all datasets in it are fully removed from the data repository."""
    )
)
@click.option(
    "--purge",
    help=unwrap(
        """Permit RUN collections to be removed, fully removing datasets within them.
        Requires --unstore as an added precaution against accidental deletion. Must not be
        passed if the collection is not a RUN."""
    ),
    is_flag=True,
)
@click.option(
    "--unstore",
    help="Remove all datasets in the collection from all datastores in which they appear.",
    is_flag=True,
)
@click.option(
    "--unlink",
    help="Before removing the given collection, unlink it from this parent collection.",
    multiple=True,
    callback=split_commas,
)
@confirm_option()
@options_file_option()
@deprecated(
    reason="Please consider using remove-collections or remove-runs instead. Will be removed after v24.",
    version="v24.0",
    category=FutureWarning,
)
def prune_collection(**kwargs: Any) -> None:
    """Remove a collection and possibly prune datasets within it."""
    result = script.pruneCollection(**kwargs)
    if result.confirm:
        print("The following collections will be removed:")
        result.removeTable.pprint_all(align="<")
        doContinue = click.confirm(text="Continue?", default=False)
    else:
        doContinue = True
    if doContinue:
        result.onConfirmation()
        print("Removed collections.")
    else:
        print("Aborted.")


pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."


disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Note that this may remove provenance information from datasets other than
        those provided, and should be used with extreme care. RUN has to be provided for backward
        compatibility, but datasets will be removed from any RUN-type collections."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appears. (By default only the first found dataset type + data id
        is purged, according to the order of COLLECTIONS passed in.)"""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires that --dry-run not be passed."""),
)


@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs: Any) -> None:
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(message=pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(message=pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(message=pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(message=pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(message=pruneDatasets_errNoOp)
    if result.dryRun:
        assert result.action is not None, "Dry run results have not been set up properly."
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(text=pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            if result.onConfirmation:
                result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
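
# A hypothetical invocation as a sketch; the repo path and dataset type name
# are illustrative only (the flags are the ones defined above):
#
#   butler prune-datasets /path/to/repo '*' --datasets obsolete_type --unstore --dry-run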


@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="TREE",
    help="""Affects how results are presented:

    TABLE lists each collection in table form, with columns for collection
    name and type, and a column that lists children of CHAINED collections
    (if any CHAINED collections are found).

    INVERSE-TABLE is like TABLE but instead of a column listing CHAINED
    collection children, it lists the parents of the collection if it is
    contained in any CHAINED collections.

    TREE recursively lists children below each CHAINED collection in tree
    form.

    INVERSE-TREE recursively lists parent collections below each collection
    in tree form.

    FLATTEN lists all collections, including children, in one list.

    [default: TREE]""",
    # Above, the default value is included, instead of using show_default, so
    # that the default is printed on its own line instead of coming right after
    # the FLATTEN text.
    callback=to_upper,
    type=click.Choice(
        choices=("TABLE", "INVERSE-TABLE", "TREE", "INVERSE-TREE", "FLATTEN"),
        case_sensitive=False,
    ),
)
@options_file_option()
def query_collections(*args: Any, **kwargs: Any) -> None:
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table,
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
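
# A hypothetical invocation as a sketch; the repo path and glob expression
# are illustrative only:
#
#   butler query-collections /path/to/repo "HSC/runs/*" --chains TABLE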


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args: Any, **kwargs: Any) -> None:
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=1)
def remove_dataset_type(*args: Any, **kwargs: Any) -> None:
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs: Any) -> None:
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")
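
# A hypothetical invocation as a sketch; the repo path, dataset type glob,
# collection name, and where expression are illustrative only, with the
# --collections/--where options assumed to come from query_datasets_options:
#
#   butler query-datasets /path/to/repo raw --collections my_run \
#       --where "instrument = 'SomeCam'"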


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of the inputCollection if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args: Any, **kwargs: Any) -> None:
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
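
# A hypothetical invocation as a sketch; the repo path, collection names,
# dataset type, and dates are illustrative only:
#
#   butler certify-calibrations /path/to/repo my_bias_run my_calib_collection bias \
#       --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59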


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs: Any) -> None:
    """List the data IDs in a repository."""
    table, reason = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if reason:
            print(reason)
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets; see --help for more information.")
        else:
            print("No results. Try --help for more information.")
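
# A hypothetical invocation as a sketch; the repo path, dimensions, dataset
# type, and collection name are illustrative only:
#
#   butler query-data-ids /path/to/repo exposure detector \
#       --datasets raw --collections my_run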


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with
        --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before it
        is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs: Any) -> None:
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="If clobber, overwrite files if they exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs: Any) -> None:
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")
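
# A hypothetical invocation as a sketch; the paths and collection name are
# illustrative only, with --collections assumed from query_datasets_options:
#
#   butler retrieve-artifacts /path/to/repo /tmp/output \
#       --collections my_run --transfer copy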


@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@transfer_dimensions_option()
@options_file_option()
def transfer_datasets(**kwargs: Any) -> None:
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the RUN dataset.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If given, recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
    "'remove': Modify the existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify the existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs: Any) -> None:
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
    " The latter is usually used for 'raw'-type data that will be ingested in multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs: Any) -> None:
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. A relative file URI is by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
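
# A hypothetical sketch of a run; the file contents, column headers, paths,
# and names below are illustrative only (column names must match the
# dimensions of the actual dataset type). Given a table file such as:
#
#   files.csv:
#     file,instrument,exposure,detector
#     data/exp1_det0.fits,SomeCam,1,0
#     data/exp1_det1.fits,SomeCam,1,1
#
# the files might be ingested with:
#
#   butler ingest-files /path/to/repo my_dataset_type my_run files.csv \
#       --prefix /path/to/staging --transfer copy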


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs: Any) -> None:
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so cannot be defined by this command. Component
    dataset types are automatically derived from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
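
# A hypothetical invocation as a sketch; the repo path, dataset type name,
# storage class, and dimensions are illustrative only:
#
#   butler register-dataset-type /path/to/repo my_catalog SourceCatalog \
#       instrument visit detector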


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True, help="DIRECTORY is the folder to receive the exported calibrations.")
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
@dataset_type_option(help="Specific DatasetType(s) to export.", multiple=True)
@transfer_option()
def export_calibs(*args: Any, **kwargs: Any) -> None:
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")