# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ()

from collections.abc import Callable
from typing import Any, cast

import click
from deprecated.sphinx import deprecated

from ... import script
from .. import utils as cmd_utils
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

# Cast the callback signatures to appease mypy since mypy thinks they
# are too constrained.
split_commas = cast(
    Callable[[click.Context, click.Option | click.Parameter, Any], Any], cmd_utils.split_commas
)
to_upper = cast(Callable[[click.Context, click.Option | click.Parameter, Any], Any], cmd_utils.to_upper)

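# A minimal illustration (assumed behavior, inferred from the option help
# texts below that mention comma separation) of how the split_commas callback
# is used: repeated, comma-separated option values are flattened into a
# single tuple, e.g.
#
#     --skip-dimensions exposure,detector --skip-dimensions visit
#
# would yield ("exposure", "detector", "visit").
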
willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs: Any) -> None:
    """Add existing datasets to a tagged collection; searches for datasets
    matching the given options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)


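# Illustrative invocation of the associate command above (the collection and
# dataset-type names are hypothetical):
#
#     butler associate REPO u/user/tagged --collections u/user/run --datasets flat
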
# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a better
# mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=click.File(mode="r"),
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import.",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args: Any, **kwargs: Any) -> None:
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)


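# Illustrative invocation of `butler import` above (the directory path,
# export file name, and transfer mode are hypothetical):
#
#     butler import REPO /path/to/exported --export-file export.yaml --transfer symlink
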
@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args: Any, **kwargs: Any) -> None:
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)


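# Illustrative invocation of `butler create` above (the seed config file name
# is hypothetical):
#
#     butler create REPO --seed-config seed.yaml --standalone
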
@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides.",
)
@click.option(
    "--file",
    "outfile",
    type=click.File(mode="w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
    "by default.",
)
@options_file_option()
def config_dump(*args: Any, **kwargs: Any) -> None:
    """Dump either a subset or full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)


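# Illustrative invocation of `butler config-dump` above, using the subset
# syntax described in the --subset help text:
#
#     butler config-dump REPO --subset .datastore.root
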
@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args: Any, **kwargs: Any) -> None:
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(
    help=unwrap(
        """COLLECTION is the name of the collection to remove. If this is a tagged or
        chained collection, datasets within the collection are not modified unless --unstore
        is passed. If this is a run collection, --purge and --unstore must be passed, and
        all datasets in it are fully removed from the data repository."""
    )
)
@click.option(
    "--purge",
    help=unwrap(
        """Permit RUN collections to be removed, fully removing datasets within them.
        Requires --unstore as an added precaution against accidental deletion. Must not be
        passed if the collection is not a RUN."""
    ),
    is_flag=True,
)
@click.option(
    "--unstore",
    help="Remove all datasets in the collection from all datastores in which they appear.",
    is_flag=True,
)
@click.option(
    "--unlink",
    help="Before removing the given `collection`, unlink it from this parent collection.",
    multiple=True,
    callback=split_commas,
)
@confirm_option()
@options_file_option()
@deprecated(
    reason="Please consider using remove-collections or remove-runs instead. Will be removed after v24.",
    version="v24.0",
    category=FutureWarning,
)
def prune_collection(**kwargs: Any) -> None:
    """Remove a collection and possibly prune datasets within it."""
    result = script.pruneCollection(**kwargs)
    if result.confirm:
        print("The following collections will be removed:")
        result.removeTable.pprint_all(align="<")
        doContinue = click.confirm(text="Continue?", default=False)
    else:
        doContinue = True
    if doContinue:
        result.onConfirmation()
        print("Removed collections.")
    else:
        print("Aborted.")


pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Note that this may remove provenance information from datasets other than
        those provided, and should be used with extreme care. RUN has to be provided for backward
        compatibility, but datasets will be removed from any RUN-type collections."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appears. (By default only the first-found dataset is purged,
        according to the order of COLLECTIONS passed in.)"""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires that --dry-run not be passed."""),
)


@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs: Any) -> None:
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(message=pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(message=pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(message=pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(message=pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(message=pruneDatasets_errNoOp)
    if result.dryRun:
        assert result.action is not None, "Dry run results have not been set up properly."
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(text=pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            if result.onConfirmation:
                result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return


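# Illustrative invocation of `butler prune-datasets` above (the collection
# name and dataset-type glob are hypothetical):
#
#     butler prune-datasets REPO u/user/run --datasets "deepCoadd*" --unstore
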
@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="TREE",
    help="""Affects how results are presented:

    TABLE lists each collection in table form, with columns for collection
    name and type, and a column that lists children of CHAINED collections
    (if any CHAINED collections are found).

    INVERSE-TABLE is like TABLE but instead of a column listing CHAINED
    collection children, it lists the parents of the collection if it is
    contained in any CHAINED collections.

    TREE recursively lists children below each CHAINED collection in tree
    form.

    INVERSE-TREE recursively lists parent collections below each collection
    in tree form.

    FLATTEN lists all collections, including child collections, in one list.

    [default: TREE]""",
    # Above, the default value is included in the help text, instead of using
    # show_default, so that the default is printed on its own line instead of
    # coming right after the FLATTEN text.
    callback=to_upper,
    type=click.Choice(
        choices=("TABLE", "INVERSE-TABLE", "TREE", "INVERSE-TREE", "FLATTEN"),
        case_sensitive=False,
    ),
)
@options_file_option()
def query_collections(*args: Any, **kwargs: Any) -> None:
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")


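# Illustrative invocation of `butler query-collections` above (the glob
# pattern is hypothetical):
#
#     butler query-collections REPO "HSC/runs/*" --chains TABLE
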
@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args: Any, **kwargs: Any) -> None:
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=1)
def remove_dataset_type(*args: Any, **kwargs: Any) -> None:
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs: Any) -> None:
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")


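# Illustrative invocation of `butler query-datasets` above (the collection
# name and where clause are hypothetical):
#
#     butler query-datasets REPO --collections u/user/run --where "instrument = 'HSC'"
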
@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of the inputCollection if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args: Any, **kwargs: Any) -> None:
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)


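# Illustrative invocation of `butler certify-calibrations` above (the
# collection names, dataset type, and dates are hypothetical):
#
#     butler certify-calibrations REPO u/user/bias-run calib/bias bias \
#         --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59
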
@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs: Any) -> None:
    """List the data IDs in a repository."""
    table, reason = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if reason:
            print(reason)
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets; see --help for more information.")
        else:
            print("No results. Try --help for more information.")


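# Illustrative invocation of `butler query-data-ids` above, following the
# "raw" example from the --datasets help text (the collection name is
# hypothetical):
#
#     butler query-data-ids REPO exposure detector --datasets raw --collections u/user/run
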
@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with
        --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before it
        is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs: Any) -> None:
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="If set, overwrite files that already exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs: Any) -> None:
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")


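# Illustrative invocation of `butler retrieve-artifacts` above (the
# destination path and collection name are hypothetical):
#
#     butler retrieve-artifacts REPO /tmp/artifacts --transfer copy --collections u/user/run
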
@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@click.option(
    "--transfer-dimensions/--no-transfer-dimensions",
    is_flag=True,
    default=True,
    help=unwrap(
        """If true, also copy dimension records along with datasets.
        If the dimensions are already present in the destination butler it
        can be more efficient to disable this. The default is to transfer
        dimensions."""
    ),
)
@options_file_option()
def transfer_datasets(**kwargs: Any) -> None:
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets to
    be transferred.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")


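# Illustrative invocation of `butler transfer-datasets` above (the repository
# URIs and collection name are hypothetical):
#
#     butler transfer-datasets SOURCE_REPO DEST_REPO --collections u/user/run
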
@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If `True`, recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
    "'remove': Modify the existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify the existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs: Any) -> None:
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

    $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

    $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type and run."
    " The latter is usually used for 'raw'-type data that will be ingested in multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs: Any) -> None:
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. A relative file URI is by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)


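# Illustrative table file for `butler ingest-files` above (the URI column
# label and dimension columns are hypothetical and must match the dimensions
# of the dataset type being ingested):
#
#     file,instrument,exposure,detector
#     file1.fits,HSC,903334,16
#     file2.fits,HSC,903334,17
#
# which might be ingested with (names are hypothetical):
#
#     butler ingest-files --prefix /data/staging REPO my_type u/user/ingest table.csv
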
@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs: Any) -> None:
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so can not be defined by this command. Component
    dataset types are automatically derived from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")


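# Illustrative invocation of `butler register-dataset-type` above (the dataset
# type name, storage class, and dimensions are hypothetical):
#
#     butler register-dataset-type REPO my_type StructuredDataDict instrument visit
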
@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True, help="DIRECTORY is the folder to receive the exported calibrations.")
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
@dataset_type_option(help="Specific DatasetType(s) to export.", multiple=True)
@transfer_option()
def export_calibs(*args: Any, **kwargs: Any) -> None:
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")