Coverage for python/lsst/daf/butler/cli/cmd/commands.py: 56%


297 statements  

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ()

import click

from ..opt import (
    collection_type_option,
    collection_argument,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)

from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

from ... import script

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs):
    """Add existing datasets to a tagged collection; searches for datasets
    matching the given query options and associates them with the named
    COLLECTION.
    """
    script.associate(**kwargs)
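
# Illustrative usage sketch for the command above (the repository path,
# collection, and dataset type names are hypothetical, and the --collections
# and --datasets option names are assumed to be supplied by
# query_datasets_options):
#
#   $ butler associate REPO tagged/good-runs --collections HSC/runs/run1 --datasets calexp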

# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a better
# mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option("--export-file",
              help="Name for the file that contains database information associated with the exported "
                   "datasets. If this is not an absolute path, does not exist in the current working "
                   "directory, and DIRECTORY is provided, it is assumed to be in that directory. Defaults "
                   "to \"export.yaml\".",
              type=click.File("r"))
@click.option("--skip-dimensions", "-s", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Dimensions that should be skipped during import.")
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)
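
# Illustrative usage sketch for the command above (paths are hypothetical);
# --export-file and -s/--skip-dimensions are defined here, while the transfer
# mode option comes from transfer_option():
#
#   $ butler import REPO ./exported_data --export-file export.yaml -s skymap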

@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option("--standalone", is_flag=True, help="Include all defaults in the config file in the repo, "
              "insulating the repo from changes in package defaults.")
@click.option("--override", is_flag=True, help="Allow values in the supplied config to override all "
              "repo settings.")
@click.option("--outfile", "-f", default=None, type=str, help="Name of output file to receive repository "
              "configuration. Default is to write butler.yaml into the specified repo.")
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)
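
# Illustrative usage sketch for the command above (paths are hypothetical);
# all flags shown are defined on this command:
#
#   $ butler create my_repo --seed-config seed.yaml --standalone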

124@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand) 

125@repo_argument(required=True, help=existingRepoHelp) 

126@click.option("--subset", "-s", type=str, 

127 help="Subset of a configuration to report. This can be any key in the hierarchy such as " 

128 "'.datastore.root' where the leading '.' specified the delimiter for the hierarchy.") 

129@click.option("--searchpath", "-p", type=str, multiple=True, callback=split_commas, 

130 metavar=typeStrAcceptsMultiple, 

131 help="Additional search paths to use for configuration overrides") 

132@click.option("--file", "outfile", type=click.File("w"), default="-", 

133 help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout " 

134 "by default.") 

135@options_file_option() 

136def config_dump(*args, **kwargs): 

137 """Dump either a subset or full Butler configuration to standard output.""" 

138 script.configDump(*args, **kwargs) 

139 

140 
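
# Illustrative usage sketch for the command above, using the '.datastore.root'
# subset key mentioned in the --subset help (the repository path is
# hypothetical):
#
#   $ butler config-dump my_repo --subset .datastore.root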

141@click.command(short_help="Validate the configuration files.", cls=ButlerCommand) 

142@repo_argument(required=True, help=existingRepoHelp) 

143@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.") 

144@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True) 

145@click.option("--ignore", "-i", type=str, multiple=True, callback=split_commas, 

146 metavar=typeStrAcceptsMultiple, 

147 help="DatasetType(s) to ignore for validation.") 

148@options_file_option() 

149def config_validate(*args, **kwargs): 

150 """Validate the configuration files for a Gen3 Butler repository.""" 

151 is_good = script.configValidate(*args, **kwargs) 

152 if not is_good: 

153 raise click.exceptions.Exit(1) 

154 

155 

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(help=unwrap("""COLLECTION is the name of the collection to remove. If this is a tagged
                                 or chained collection, datasets within the collection are not modified unless
                                 --unstore is passed. If this is a run collection, --purge and --unstore must
                                 be passed, and all datasets in it are fully removed from the data
                                 repository."""))
@click.option("--purge",
              help=unwrap("""Permit RUN collections to be removed, fully removing datasets within them.
                          Requires --unstore as an added precaution against accidental deletion. Must not be
                          passed if the collection is not a RUN."""),
              is_flag=True)
@click.option("--unstore",
              help="Remove all datasets in the collection from all datastores in which they appear.",
              is_flag=True)
@click.option("--unlink",
              help="Before removing the given collection, unlink it from the given parent collection.",
              multiple=True,
              callback=split_commas)
@confirm_option()
@options_file_option()
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    result = script.pruneCollection(**kwargs)
    if result.confirm:
        print("The following collections will be removed:")
        result.removeTable.pprint_all(align="<")
        doContinue = click.confirm("Continue?", default=False)
    else:
        doContinue = True
    if doContinue:
        result.onConfirmation()
        print("Removed collections.")
    else:
        print("Aborted.")
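
# Illustrative usage sketch for the command above (repository and collection
# names are hypothetical); --purge and --unstore are defined on this command:
#
#   $ butler prune-collection my_repo HSC/runs/scratch --purge --unstore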

pruneDatasets_wouldRemoveMsg = unwrap("""The following datasets will be removed from any datastores in which
                                      they are present:""")
pruneDatasets_wouldDisassociateMsg = unwrap("""The following datasets will be disassociated from {collections}
                                            if they are currently present in it (which is not checked):""")
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap("""The following datasets will be disassociated from
                                                     {collections} if they are currently present in it (which
                                                     is not checked), and removed from any datastores in which
                                                     they are present.""")
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Cannot use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collection'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection.""")
pruneDatasets_errPruneOnNotRun = "Cannot prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate", "disassociate_tags",
    help=unwrap("""Disassociate pruned datasets from the given tagged collections. May not be used with
                --purge."""),
    multiple=True,
    callback=split_commas,
    metavar="TAG"
)

purge_option = MWOptionDecorator(
    "--purge", "purge_run",
    help=unwrap("""Completely remove the dataset from the given RUN in the Registry. May not be used with
                --disassociate. Note, this may remove provenance information from datasets other than those
                provided, and should be used with extreme care."""),
    metavar="RUN"
)

find_all_option = MWOptionDecorator(
    "--find-all", is_flag=True,
    help=unwrap("""Purge the dataset results from all of the collections in which a dataset of that dataset
                type + data id combination appears. (By default only the first-found dataset type + data id
                is purged, according to the order of COLLECTIONS passed in.)""")
)

unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap("""Remove these datasets from all datastores configured with this data repository. If
                --disassociate and --purge are not used then --unstore will be used by default. Note that
                --unstore will make it impossible to retrieve these datasets even via other collections.
                Datasets that are already not stored are ignored by this option.""")
)

dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap("""Display the datasets that would be removed but do not remove them.

                Note that a dataset can be in collections other than its RUN-type collection, and removing it
                will remove it from all of them, even though the only one this will show is its RUN
                collection.""")
)

quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed.""")
)

273@click.command(cls=ButlerCommand, short_help="Remove datasets.") 

274@repo_argument(required=True) 

275@collections_argument(help=unwrap("""COLLECTIONS is or more expressions that identify the collections to 

276 search for datasets. Glob-style expressions may be used but only if the 

277 --find-all flag is also passed.""")) 

278@option_section("Query Datasets Options:") 

279@datasets_option(help="One or more glob-style expressions that identify the dataset types to be pruned.", 

280 multiple=True, 

281 callback=split_commas) 

282@find_all_option() 

283@where_option(help=where_help) 

284@option_section("Prune Options:") 

285@disassociate_option() 

286@purge_option() 

287@unstore_option() 

288@option_section("Execution Options:") 

289@dry_run_option() 

290@confirm_option() 

291@quiet_option() 

292@option_section("Other Options:") 

293@options_file_option() 

294def prune_datasets(**kwargs): 

295 """Query for and remove one or more datasets from a collection and/or 

296 storage. 

297 """ 

298 quiet = kwargs.pop("quiet", False) 

299 if quiet: 

300 if kwargs["dry_run"]: 

301 raise click.ClickException(pruneDatasets_errQuietWithDryRun) 

302 kwargs["confirm"] = False 

303 

304 result = script.pruneDatasets(**kwargs) 

305 

306 if result.errPurgeAndDisassociate: 

307 raise click.ClickException(pruneDatasets_errPurgeAndDisassociate) 

308 return 

309 if result.errNoCollectionRestriction: 

310 raise click.ClickException(pruneDatasets_errNoCollectionRestriction) 

311 if result.errPruneOnNotRun: 

312 raise click.ClickException(pruneDatasets_errPruneOnNotRun.format(**result.errDict)) 

313 if result.errNoOp: 

314 raise click.ClickException(pruneDatasets_errNoOp) 

315 if result.dryRun: 

316 if result.action["disassociate"] and result.action["unstore"]: 

317 msg = pruneDatasets_wouldDisassociateAndRemoveMsg 

318 elif result.action["disassociate"]: 

319 msg = pruneDatasets_wouldDisassociateMsg 

320 else: 

321 msg = pruneDatasets_wouldRemoveMsg 

322 print(msg.format(**result.action)) 

323 printAstropyTables(result.tables) 

324 return 

325 if result.confirm: 

326 if not result.tables: 

327 print(pruneDatasets_noDatasetsFound) 

328 return 

329 print(pruneDatasets_willRemoveMsg) 

330 printAstropyTables(result.tables) 

331 doContinue = click.confirm(pruneDatasets_askContinueMsg, default=False) 

332 if doContinue: 

333 result.onConfirmation() 

334 print(pruneDatasets_didRemoveAforementioned) 

335 else: 

336 print(pruneDatasets_didNotRemoveAforementioned) 

337 return 

338 if result.finished: 

339 if not quiet: 

340 print(pruneDatasets_didRemoveMsg) 

341 printAstropyTables(result.tables) 

342 return 

343 

344 
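
# Illustrative usage sketch for the command above (the repository and dataset
# type names are hypothetical; '*' selects all collections, which, being a
# glob, requires --find-all):
#
#   $ butler prune-datasets my_repo '*' --find-all --datasets my_scratch_type --unstore --dry-run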

345@click.command(short_help="Search for collections.", cls=ButlerCommand) 

346@repo_argument(required=True) 

347@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the " 

348 "collections to return.") 

349@collection_type_option() 

350@click.option("--chains", 

351 default="table", 

352 help=unwrap("""Affects how results are presented. TABLE lists each dataset in a row with 

353 chained datasets' children listed in a Definition column. TREE lists children below 

354 their parent in tree form. FLATTEN lists all datasets, including child datasets in 

355 one list.Defaults to TABLE. """), 

356 callback=to_upper, 

357 type=click.Choice(("TABLE", "TREE", "FLATTEN"), case_sensitive=False)) 

358@options_file_option() 

359def query_collections(*args, **kwargs): 

360 """Get the collections whose names match an expression.""" 

361 table = script.queryCollections(*args, **kwargs) 

362 # The unit test that mocks script.queryCollections does not return a table 

363 # so we need the following `if`. 

364 if table: 

365 # When chains==TREE, the children of chained datasets are indented 

366 # relative to their parents. For this to work properly the table must 

367 # be left-aligned. 

368 table.pprint_all(align="<") 

369 

370 
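
# Illustrative usage sketch for the command above (the repository path and
# glob are hypothetical); --chains is defined on this command:
#
#   $ butler query-collections my_repo "HSC/*" --chains tree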

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "dataset types to return.")
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")
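
# Illustrative usage sketch for the command above (the glob is hypothetical,
# and the --verbose flag name is assumed to be what verbose_option supplies):
#
#   $ butler query-dataset-types my_repo "*_metadata" --verbose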

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('dataset-type-name', nargs=1)
def remove_dataset_type(*args, **kwargs):
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)

@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs):
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")
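
# Illustrative usage sketch for the command above (collection and dataset
# type names are hypothetical; the dataset-type argument and --collections
# option are assumed to come from query_datasets_options):
#
#   $ butler query-datasets my_repo calexp --collections "HSC/runs/*"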

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('input-collection')
@click.argument('output-collection')
@click.argument('dataset-type-name')
@click.option("--begin-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the beginning of the validity range for the
                          certified calibrations."""))
@click.option("--end-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the end of the validity range for the
                          certified calibrations."""))
@click.option("--search-all-inputs", is_flag=True, default=False,
              help=unwrap("""Search all children of the input collection if it is a CHAINED collection,
                          instead of just the most recent one."""))
@options_file_option()
def certify_calibrations(*args, **kwargs):
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
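
# Illustrative usage sketch for the command above (collection and dataset
# type names are hypothetical; the dates follow the ISO-8601 format the
# options document):
#
#   $ butler certify-calibrations my_repo calib/build calib/certified bias \
#         --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T00:00:00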

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(help=unwrap("""DIMENSIONS are the keys of the data IDs to yield, such as exposure,
                                 instrument, or tract. Will be expanded to include any dependencies."""))
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded data IDs. For example, including "raw" here would
                             constrain the yielded "instrument", "exposure", "detector", and
                             "physical_filter" values to only those for which at least one "raw" dataset
                             exists in "collections". Requires --collections."""))
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs):
    """List the data IDs in a repository."""
    table = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets, see --help for more information.")
        else:
            print("No results. Try --help for more information.")
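
# Illustrative usage sketch for the command above, following the "raw" example
# in the --datasets help (the repository path and collection name are
# hypothetical):
#
#   $ butler query-data-ids my_repo exposure detector --datasets raw --collections HSC/raw/all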

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded records. May only be used with --collections."""))
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option("--no-check", is_flag=True,
              help=unwrap("""Don't check the query before execution. By default the query is checked before
                          it is executed; this may reject some valid queries that resemble common
                          mistakes."""))
@options_file_option()
def query_dimension_records(**kwargs):
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")
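
# Illustrative usage sketch for the command above (the instrument name and
# the exact --where expression syntax are hypothetical; see where_help for
# the supported grammar):
#
#   $ butler query-dimension-records my_repo exposure --where "instrument = 'MyCam'"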

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option("--preserve-path/--no-preserve-path", is_flag=True, default=True,
              help="Preserve the datastore path to the artifact at the destination.")
@click.option("--clobber/--no-clobber", is_flag=True, default=False,
              help="Overwrite files if they already exist at the destination.")
@options_file_option()
def retrieve_artifacts(**kwargs):
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")
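
# Illustrative usage sketch for the command above (paths are hypothetical,
# and the --datasets option name is assumed to come from
# query_datasets_options; --no-preserve-path is defined here):
#
#   $ butler retrieve-artifacts my_repo /tmp/artifacts --datasets raw --no-preserve-path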

@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@options_file_option()
def transfer_datasets(**kwargs):
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets to
    transfer.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")
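
# Illustrative usage sketch for the command above (repository URIs,
# collection, and dataset type names are hypothetical; --collections and
# --datasets are assumed to come from query_datasets_options):
#
#   $ butler transfer-datasets /repo/source /repo/dest --collections HSC/runs/run1 --datasets calexp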

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option("--doc", default="",
              help="Documentation string associated with this collection. "
                   "Only relevant if the collection is newly created.")
@click.option("--flatten/--no-flatten", default=False,
              help="If `True` recursively flatten out any nested chained collections in children first.")
@click.option("--mode",
              type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
              default="redefine",
              help="Update mode: "
                   "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
                   "'remove': Modify the existing chain to remove the supplied CHILDREN. "
                   "'pop': Pop a numbered element off the chain. Defaults to popping "
                   "the first element (0). ``children`` must be integers if given. "
                   "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
                   "'extend': Modify the existing chain to extend it with the supplied CHILDREN.")
def collection_chain(**kwargs):
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    This will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option("--formatter", type=str,
              help="Fully-qualified Python class to use as the Formatter. If not specified the formatter"
                   " will be determined from the dataset type and datastore configuration.")
@click.option("--id-generation-mode",
              default="UNIQUE",
              help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
                   " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
                   " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
                   " The latter is usually used for 'raw'-type data that will be ingested into multiple"
                   " repositories.",
              callback=to_upper,
              type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False))
@click.option("--data-id",
              type=str,
              multiple=True, callback=split_commas,
              help="Keyword=value string with an additional dataId value that is fixed for all ingested"
                   " files. This can be used to simplify the table file by removing repeated entries that are"
                   " fixed for all files to be ingested. Multiple key/values can be given either by using"
                   " comma separation or multiple command line options.")
@click.option("--prefix",
              type=str,
              help="For relative paths in the table file, specify a prefix to use. The default is to"
                   " use the current working directory.")
@transfer_option()
def ingest_files(**kwargs):
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. Relative file URIs are by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
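
# A minimal illustrative table file for the command above, assuming a dataset
# type whose dimensions are instrument, exposure, and detector (all names and
# paths are hypothetical):
#
#   file,instrument,exposure,detector
#   data/image_000.fits,MyCam,1001,0
#   data/image_001.fits,MyCam,1001,1
#
# which could then be ingested with:
#
#   $ butler ingest-files my_repo my_dataset_type my_run table.csv --prefix /data/staging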

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option("--is-calibration/--no-is-calibration", is_flag=True, default=False,
              help="Indicate that this dataset type can be part of a calibration collection.")
def register_dataset_type(**kwargs):
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so can not be defined by this command. Component
    dataset types are automatically derived from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
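
# Illustrative usage sketch for the command above (the dataset type name and
# dimensions are hypothetical, and "StructuredDataDict" stands in for
# whatever storage class is appropriate for the data):
#
#   $ butler register-dataset-type my_repo my_summary StructuredDataDict instrument visit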

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True)
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
def export_calibs(*args, **kwargs):
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")
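
# Illustrative usage sketch for the command above (paths and collection names
# are hypothetical):
#
#   $ butler export-calibs my_repo ./calib_export HSC/calib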