Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 56%

303 statements  


# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ()

import click
from deprecated.sphinx import deprecated

from ... import script
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs):
    """Add existing datasets to a tagged collection; searches for datasets with
    the options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)


# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in the cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a better
# mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=click.File(mode="r"),
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)


@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)


@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides",
)
@click.option(
    "--file",
    "outfile",
    type=click.File(mode="w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
    "by default.",
)
@options_file_option()
def config_dump(*args, **kwargs):
    """Dump either a subset or full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
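
# Illustrative usage (not part of the source): assuming a repository at
# "my_repo" and the usual dash-separated subcommand name, dumping only the
# datastore root might look like
#
#   $ butler config-dump my_repo --subset .datastore.root
#
# where the leading '.' in the subset key selects '.' as the hierarchy delimiter.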


@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args, **kwargs):
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(
    help=unwrap(
        """COLLECTION is the name of the collection to remove. If this is a tagged or
        chained collection, datasets within the collection are not modified unless --unstore
        is passed. If this is a run collection, --purge and --unstore must be passed, and
        all datasets in it are fully removed from the data repository."""
    )
)
@click.option(
    "--purge",
    help=unwrap(
        """Permit RUN collections to be removed, fully removing datasets within them.
        Requires --unstore as an added precaution against accidental deletion. Must not be
        passed if the collection is not a RUN."""
    ),
    is_flag=True,
)
@click.option(
    "--unstore",
    help=("""Remove all datasets in the collection from all datastores in which they appear."""),
    is_flag=True,
)
@click.option(
    "--unlink",
    help="Before removing the given `collection` unlink it from this parent collection.",
    multiple=True,
    callback=split_commas,
)
@confirm_option()
@options_file_option()
@deprecated(
    reason="Please consider using remove-collections or remove-runs instead. Will be removed after v24.",
    version="v24.0",
    category=FutureWarning,
)
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    result = script.pruneCollection(**kwargs)
    if result.confirm:
        print("The following collections will be removed:")
        result.removeTable.pprint_all(align="<")
        doContinue = click.confirm(text="Continue?", default=False)
    else:
        doContinue = True
    if doContinue:
        result.onConfirmation()
        print("Removed collections.")
    else:
        print("Aborted.")


pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTIONS arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Note, this may remove provenance information from datasets other than those
        provided, and should be used with extreme care. RUN has to be provided for backward
        compatibility, but datasets will be removed from any RUN-type collections."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appears. (By default only the first found dataset type + data id is
        purged, according to the order of COLLECTIONS passed in)."""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed."""),
)


@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs):
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(message=pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(message=pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(message=pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(message=pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(message=pruneDatasets_errNoOp)
    if result.dryRun:
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(text=pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
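
# Illustrative usage (not part of the source): a dry run reporting the
# "calexp" datasets (a hypothetical dataset type) found in all collections
# matching a glob, without removing anything, might look like
#
#   $ butler prune-datasets my_repo "u/someone/*" --find-all --datasets calexp --unstore --dry-run
#
# Dropping --dry-run performs the removal after the confirmation prompt.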


@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="TREE",
    help="""Affects how results are presented:

    TABLE lists each collection in table form, with columns for collection name
    and type, and a column that lists the children of CHAINED collections (if any
    CHAINED collections are found).

    INVERSE-TABLE is like TABLE but instead of a column listing CHAINED
    collection children, it lists the parents of the collection if it is contained
    in any CHAINED collections.

    TREE recursively lists children below each CHAINED collection in tree form.

    INVERSE-TREE recursively lists parent collections below each collection in
    tree form.

    FLATTEN lists all collections, including children, in one list.

    [default: TREE]""",
    # above, the default value is included, instead of using show_default, so
    # that the default is printed on its own line instead of coming right after
    # the FLATTEN text.
    callback=to_upper,
    type=click.Choice(
        choices=("TABLE", "INVERSE-TABLE", "TREE", "INVERSE-TREE", "FLATTEN"),
        case_sensitive=False,
    ),
)
@options_file_option()
def query_collections(*args, **kwargs):
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
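
# Illustrative usage (not part of the source): listing every collection whose
# name starts with a hypothetical "HSC/" prefix, with CHAINED children shown in
# a table column rather than a tree:
#
#   $ butler query-collections my_repo "HSC/*" --chains TABLE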


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=1)
def remove_dataset_type(*args, **kwargs):
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs):
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of the inputCollection if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args, **kwargs):
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
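
# Illustrative usage (not part of the source): certifying hypothetical "bias"
# calibrations from a processing run into a calibration collection for a
# one-year validity range (datetimes are ISO-8601, TAI):
#
#   $ butler certify-calibrations my_repo u/someone/bias-run my_instrument/calib bias \
#       --begin-date 2022-01-01T00:00:00 --end-date 2023-01-01T00:00:00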


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs):
    """List the data IDs in a repository."""
    table, reason = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if reason:
            print(reason)
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets, see --help for more information.")
        else:
            print("No results. Try --help for more information.")
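
# Illustrative usage (not part of the source): yielding exposure/detector data
# IDs constrained to those with at least one "raw" dataset in a hypothetical
# collection, further filtered by a WHERE expression (syntax shown is only a
# sketch of the query expression language):
#
#   $ butler query-data-ids my_repo exposure detector \
#       --collections my_instrument/raw/all --datasets raw \
#       --where "instrument='MyCam' AND detector=10"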


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with
        --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before it
        is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs):
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="If clobber, overwrite files if they exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs):
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")


@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@click.option(
    "--transfer-dimensions/--no-transfer-dimensions",
    is_flag=True,
    default=True,
    help=unwrap(
        """If true, also copy dimension records along with datasets.
        If the dimensions are already present in the destination butler it
        can be more efficient to disable this. The default is to transfer
        dimensions."""
    ),
)
@options_file_option()
def transfer_datasets(**kwargs):
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the RUN dataset.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If `True` recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create new chain or redefine existing chain with the supplied CHILDREN. "
    "'remove': Modify existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs):
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

    $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

    $ butler collection-chain REPO --mode=pop PARENT -- -1

    Will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataID and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataID, dataset type and run."
    " The latter is usually used for 'raw'-type data that will be ingested in multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs):
    """Ingest files from table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. A relative file URI is by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records and so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
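
# Illustrative example (not part of the source): a hypothetical CSV table file
# for a "flat" dataset type with instrument and detector dimensions could look
# like
#
#   file,instrument,detector
#   flats/flat_00.fits,MyCam,0
#   flats/flat_01.fits,MyCam,1
#
# and might be ingested with
#
#   $ butler ingest-files --prefix /data/staging my_repo flat u/someone/flats flats.csv
#
# assuming the "flat" dataset type and the MyCam dimension records already exist.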


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs):
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so can not be defined by this command. It is
    automatically derived from the composite dataset type when a composite
    storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
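
# Illustrative usage (not part of the source): registering a hypothetical
# "my_summary_table" dataset type with two dimensions and an existing storage
# class (add --is-calibration if it should be usable in calibration
# collections):
#
#   $ butler register-dataset-type my_repo my_summary_table StructuredDataDict instrument visit
#
# The storage class and dimension names must already be known to the repository.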


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True, help="DIRECTORY is the folder to receive the exported calibrations.")
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
@dataset_type_option(help="Specific DatasetType(s) to export.", multiple=True)
@transfer_option()
def export_calibs(*args, **kwargs):
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")