# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ()

import click

from ..opt import (
    collection_type_option,
    collection_argument,
    collections_argument,
    collections_option,
    components_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    options_file_option,
    query_datasets_options,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)

from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

from ... import script


willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."

@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs):
    """Add existing datasets to a tagged collection; searches for datasets
    matching the query options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)
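
# Illustrative sketch only (not from the source): one way this command might
# be invoked, assuming the usual underscore-to-dash command naming and that
# query_datasets_options supplies --collections/--datasets options. The
# repository path and collection names are hypothetical.
#
#   butler associate myrepo tag/favorites --collections run/a,run/b --datasets calexp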

# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a
# better mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option("--export-file",
              help="Name for the file that contains database information associated with the exported "
                   "datasets. If this is not an absolute path, does not exist in the current working "
                   "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
                   "to \"export.yaml\".",
              type=click.File("r"))
@click.option("--skip-dimensions", "-s", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Dimensions that should be skipped during import.")
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)
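
# Illustrative sketch only (not from the source); the repository and export
# paths are hypothetical, and the transfer mode shown is one plausible choice
# for the transfer_option decorator above.
#
#   butler import myrepo /path/to/exported/data --export-file export.yaml --transfer copy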

@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option("--standalone", is_flag=True, help="Include all defaults in the config file in the repo, "
              "insulating the repo from changes in package defaults.")
@click.option("--override", is_flag=True, help="Allow values in the supplied config to override all "
              "repo settings.")
@click.option("--outfile", "-f", default=None, type=str, help="Name of output file to receive repository "
              "configuration. Default is to write butler.yaml into the specified repo.")
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)
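
# Illustrative sketch only (not from the source); the paths and file names
# are hypothetical.
#
#   butler create /data/myrepo --seed-config seed.yaml --standalone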

@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--subset", "-s", type=str,
              help="Subset of a configuration to report. This can be any key in the hierarchy such as "
                   "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.")
@click.option("--searchpath", "-p", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Additional search paths to use for configuration overrides.")
@click.option("--file", "outfile", type=click.File("w"), default="-",
              help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
                   "by default.")
@options_file_option()
def config_dump(*args, **kwargs):
    """Dump either a subset or the full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
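
# Illustrative sketch only (not from the source), using the '.datastore.root'
# subset key quoted in the --subset help above; the repository path is
# hypothetical.
#
#   butler config-dump /data/myrepo --subset .datastore.root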

@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option("--ignore", "-i", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="DatasetType(s) to ignore for validation.")
@options_file_option()
def config_validate(*args, **kwargs):
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(help=unwrap("""COLLECTION is the name of the collection to remove. If this is a tagged
                                 or chained collection, datasets within the collection are not modified
                                 unless --unstore is passed. If this is a run collection, --purge and
                                 --unstore must be passed, and all datasets in it are fully removed from the
                                 data repository."""))
@click.option("--purge",
              help=unwrap("""Permit RUN collections to be removed, fully removing datasets within them.
                          Requires --unstore as an added precaution against accidental deletion. Must not be
                          passed if the collection is not a RUN."""),
              is_flag=True)
@click.option("--unstore",
              help="Remove all datasets in the collection from all datastores in which they appear.",
              is_flag=True)
@click.option("--unlink",
              help="Before removing the given collection, unlink it from this parent collection.",
              multiple=True,
              callback=split_commas)
@options_file_option()
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    script.pruneCollection(**kwargs)
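
# Illustrative sketch only (not from the source); the repository path and
# collection name are hypothetical. --purge requires --unstore, per the help
# text above.
#
#   butler prune-collection myrepo run/scratch --purge --unstore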

pruneDatasets_wouldRemoveMsg = unwrap("""The following datasets will be removed from any datastores in which
                                      they are present:""")
pruneDatasets_wouldDisassociateMsg = unwrap("""The following datasets will be disassociated from {collections}
                                            if they are currently present in them (which is not checked):""")
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap("""The following datasets will be disassociated from
                                                     {collections} if they are currently present in them
                                                     (which is not checked), and removed from any datastores
                                                     in which they are present.""")
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Cannot use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection.""")
pruneDatasets_errPruneOnNotRun = "Cannot prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate", "disassociate_tags",
    help=unwrap("""Disassociate pruned datasets from the given tagged collections. May not be used with
                --purge."""),
    multiple=True,
    callback=split_commas,
    metavar="TAG"
)


purge_option = MWOptionDecorator(
    "--purge", "purge_run",
    help=unwrap("""Completely remove the dataset from the given RUN in the Registry. May not be used with
                --disassociate. Note, this may remove provenance information from datasets other than those
                provided, and should be used with extreme care."""),
    metavar="RUN"
)


find_all_option = MWOptionDecorator(
    "--find-all", is_flag=True,
    help=unwrap("""Purge the dataset results from all of the collections in which a dataset of that dataset
                type + data id combination appears. (By default only the first-found dataset type + data id
                is purged, according to the order of COLLECTIONS passed in.)""")
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap("""Remove these datasets from all datastores configured with this data repository. If
                --disassociate and --purge are not used then --unstore will be used by default. Note that
                --unstore will make it impossible to retrieve these datasets even via other collections.
                Datasets that are already not stored are ignored by this option.""")
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap("""Display the datasets that would be removed but do not remove them.

                Note that a dataset can be in collections other than its RUN-type collection, and removing it
                will remove it from all of them, even though the only one this will show is its RUN
                collection.""")
)


confirm_option = MWOptionDecorator(
    "--confirm/--no-confirm",
    default=True,
    help="Print expected action and a confirmation prompt before executing. Default is --confirm."
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed.""")
)

@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(help=unwrap("""COLLECTIONS is one or more expressions that identify the collections to
                                  search for datasets. Glob-style expressions may be used but only if the
                                  --find-all flag is also passed."""))
@option_section("Query Datasets Options:")
@datasets_option(help="One or more glob-style expressions that identify the dataset types to be pruned.",
                 multiple=True,
                 callback=split_commas)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs):
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(pruneDatasets_errNoOp)
    if result.dryRun:
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
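
# Illustrative sketch only (not from the source); the repository, collection,
# and dataset type names are hypothetical. As written this would prompt for
# confirmation (the --confirm default) before unstoring.
#
#   butler prune-datasets myrepo run/scratch --datasets "calexp*" --unstore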

@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "collections to return.")
@collection_type_option()
@click.option("--chains",
              default="table",
              help=unwrap("""Affects how results are presented. TABLE lists each collection in a row with
                          chained collections' children listed in a Definition column. TREE lists children
                          below their parent in tree form. FLATTEN lists all collections, including children,
                          in one list. Defaults to TABLE."""),
              callback=to_upper,
              type=click.Choice(("TABLE", "TREE", "FLATTEN"), case_sensitive=False))
@options_file_option()
def query_collections(*args, **kwargs):
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
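
# Illustrative sketch only (not from the source); the repository path and glob
# are hypothetical. This would list matching collections, rendering chained
# collections as an indented tree.
#
#   butler query-collections myrepo "HSC/*" --chains tree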

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "dataset types to return.")
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('dataset-type-name', nargs=1)
def remove_dataset_type(*args, **kwargs):
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)

@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs):
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
    print("")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('input-collection')
@click.argument('output-collection')
@click.argument('dataset-type-name')
@click.option("--begin-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the beginning of the validity range for the
                          certified calibrations."""))
@click.option("--end-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the end of the validity range for the
                          certified calibrations."""))
@click.option("--search-all-inputs", is_flag=True, default=False,
              help=unwrap("""Search all children of the input collection if it is a CHAINED collection,
                          instead of just the most recent one."""))
@options_file_option()
def certify_calibrations(*args, **kwargs):
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
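
# Illustrative sketch only (not from the source); the collection and dataset
# type names are hypothetical, and the dates follow the ISO-8601 (TAI) form
# the option help above describes.
#
#   butler certify-calibrations myrepo calib/run calib/certified bias \
#       --begin-date 2020-01-01T00:00:00 --end-date 2020-12-31T23:59:59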

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(help=unwrap("""DIMENSIONS are the keys of the data IDs to yield, such as exposure,
                                 instrument, or tract. Will be expanded to include any dependencies."""))
@collections_option()
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded data IDs. For example, including "raw" here would
                             constrain the yielded "instrument", "exposure", "detector", and
                             "physical_filter" values to only those for which at least one "raw" dataset
                             exists in "collections"."""))
@where_option(help=where_help)
@options_file_option()
def query_data_ids(**kwargs):
    """List the data IDs in a repository."""
    table = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets, see --help for more information.")
        else:
            print("No results. Try --help for more information.")
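
# Illustrative sketch only (not from the source), echoing the "raw" example in
# the --datasets help above; the repository path and collection name are
# hypothetical.
#
#   butler query-data-ids myrepo exposure detector --datasets raw --collections HSC/raw/all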

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded records. Only affects results when used with
                             --collections."""))
@collections_option(help=collections_option.help + " Only affects results when used with --datasets.")
@where_option(help=where_help)
@click.option("--no-check", is_flag=True,
              help=unwrap("""Don't check the query before execution. By default the query is checked before
                          it is executed, which may reject some valid queries that resemble common
                          mistakes."""))
@options_file_option()
def query_dimension_records(**kwargs):
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option("--preserve-path/--no-preserve-path", is_flag=True, default=True,
              help="Preserve the datastore path to the artifact at the destination.")
@click.option("--clobber/--no-clobber", is_flag=True, default=False,
              help="If set, overwrite files if they already exist locally.")
@options_file_option()
def retrieve_artifacts(**kwargs):
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")
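
# Illustrative sketch only (not from the source); the destination directory
# and query values are hypothetical, and --collections/--datasets are assumed
# to come from query_datasets_options.
#
#   butler retrieve-artifacts myrepo /tmp/artifacts --collections run/a --datasets calexp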

@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@options_file_option()
def transfer_datasets(**kwargs):
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets to be
    transferred.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")
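
# Illustrative sketch only (not from the source); the repository URIs and
# query values are hypothetical.
#
#   butler transfer-datasets /data/src_repo /data/dest_repo --collections run/a --datasets raw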

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1)
@click.option("--doc", default="",
              help="Documentation string associated with this collection. "
                   "Only relevant if the collection is newly created.")
@click.option("--flatten/--no-flatten", default=False,
              help="If --flatten, recursively flatten out any nested chained collections in CHILDREN first.")
@click.option("--mode",
              type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
              default="redefine",
              help="Update mode: "
                   "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
                   "'remove': Modify the existing chain to remove the supplied CHILDREN. "
                   "'pop': Pop a numbered element off the chain. Defaults to popping "
                   "the first element (0). CHILDREN must be integers if given. "
                   "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
                   "'extend': Modify the existing chain to extend it with the supplied CHILDREN.")
def collection_chain(**kwargs):
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The exact
    usage depends on the MODE option. When the MODE is 'pop' the CHILDREN
    should be integer indices indicating collections to be removed from
    the current chain.

    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification:

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")
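
# Illustrative sketch only (not from the source), complementing the 'pop'
# example in the docstring: redefine a chain (the default mode) so that
# searches consult run/b before run/a. All names are hypothetical.
#
#   butler collection-chain myrepo chain/all run/b run/a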

@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option("--formatter", type=str,
              help="Fully-qualified Python class to use as the Formatter. If not specified the formatter"
                   " will be determined from the dataset type and datastore configuration.")
@click.option("--id-generation-mode",
              default="UNIQUE",
              help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
                   " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
                   " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
                   " The latter is usually used for 'raw'-type data that will be ingested in multiple"
                   " repositories.",
              callback=to_upper,
              type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False))
@click.option("--data-id",
              type=str,
              multiple=True, callback=split_commas,
              help="Keyword=value string with an additional dataId value that is fixed for all ingested"
                   " files. This can be used to simplify the table file by removing repeated entries that are"
                   " fixed for all files to be ingested. Multiple key/values can be given either by using"
                   " comma separation or multiple command line options.")
@click.option("--prefix",
              type=str,
              help="For relative paths in the table file, specify a prefix to use. The default is to"
                   " use the current working directory.")
@transfer_option()
def ingest_files(**kwargs):
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. Relative file URIs are by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
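
# Illustrative sketch only (not from the source): a hypothetical table file
# readable by astropy.table for a dataset type whose dimensions are assumed to
# be "exposure" and "detector", followed by one plausible invocation. All
# names and paths are hypothetical.
#
#   table.csv:
#     file,exposure,detector
#     file:///data/file1.fits,1001,0
#     file:///data/file2.fits,1001,1
#
#   butler ingest-files myrepo my_dataset_type run/ingest table.csv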