# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import annotations

__all__ = ()

from typing import Any

import click

from ... import script
from ..opt import (
    collection_argument,
    collection_type_option,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    limit_option,
    offset_option,
    options_file_option,
    order_by_option,
    query_datasets_options,
    register_dataset_types_option,
    repo_argument,
    transfer_dimensions_option,
    transfer_option,
    verbose_option,
    where_option,
)
from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs: Any) -> None:
    """Add existing datasets to a tagged collection; search for datasets
    matching the given options and add them to the named COLLECTION.
    """
    script.associate(**kwargs)
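

# Illustrative invocation (a sketch; the dataset-selection options come from
# query_datasets_options() and are defined elsewhere, and the collection name
# is hypothetical):
#
#     $ butler associate myrepo tagged/good-seeing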


# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a better
# mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option(
    "--export-file",
    help="Name for the file that contains database information associated with the exported "
    "datasets. If this is not an absolute path, does not exist in the current working "
    "directory, and DIRECTORY is provided, it is assumed to be in that directory. Defaults "
    'to "export.yaml".',
    type=str,
)
@click.option(
    "--skip-dimensions",
    "-s",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Dimensions that should be skipped during import.",
)
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args: Any, **kwargs: Any) -> None:
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)
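

# Illustrative invocation (a sketch; the "--transfer" spelling is assumed to
# come from transfer_option(), and paths are hypothetical):
#
#     $ butler import myrepo ./exported_data --export-file export.yaml -s skymap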


@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option(
    "--standalone",
    is_flag=True,
    help="Include all defaults in the config file in the repo, "
    "insulating the repo from changes in package defaults.",
)
@click.option(
    "--override", is_flag=True, help="Allow values in the supplied config to override all repo settings."
)
@click.option(
    "--outfile",
    "-f",
    default=None,
    type=str,
    help="Name of output file to receive repository "
    "configuration. Default is to write butler.yaml into the specified repo.",
)
@options_file_option()
def create(*args: Any, **kwargs: Any) -> None:
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)
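

# Illustrative invocation (a sketch; file names are hypothetical): create a
# repository whose config includes all defaults, insulated from package
# changes.
#
#     $ butler create myrepo --seed-config seed.yaml --standalone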


@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option(
    "--subset",
    "-s",
    type=str,
    help="Subset of a configuration to report. This can be any key in the hierarchy such as "
    "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.",
)
@click.option(
    "--searchpath",
    "-p",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="Additional search paths to use for configuration overrides.",
)
@click.option(
    "--file",
    "outfile",
    type=click.File(mode="w"),
    default="-",
    help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout by default.",
)
@options_file_option()
def config_dump(*args: Any, **kwargs: Any) -> None:
    """Dump either a subset or full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
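

# Illustrative invocation (a sketch): report only the datastore branch of the
# configuration hierarchy on stdout.
#
#     $ butler config-dump myrepo --subset .datastore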


@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option(
    "--ignore",
    "-i",
    type=str,
    multiple=True,
    callback=split_commas,
    metavar=typeStrAcceptsMultiple,
    help="DatasetType(s) to ignore for validation.",
)
@options_file_option()
def config_validate(*args: Any, **kwargs: Any) -> None:
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)
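

# Illustrative invocation (a sketch): because validation failure raises
# click.exceptions.Exit(1), the exit status can drive shell logic.
#
#     $ butler config-validate myrepo --quiet || echo "validation failed"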


pruneDatasets_wouldRemoveMsg = unwrap(
    """The following datasets will be removed from any datastores in which
    they are present:"""
)
pruneDatasets_wouldDisassociateMsg = unwrap(
    """The following datasets will be disassociated from {collections}
    if they are currently present in it (which is not checked):"""
)
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap(
    """The following datasets will be disassociated from
    {collections} if they are currently present in it (which is
    not checked), and removed from any datastores in which they
    are present."""
)
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTION arguments (select all
    collections by passing '*', or consider using 'butler prune-collections'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection."""
)
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."

disassociate_option = MWOptionDecorator(
    "--disassociate",
    "disassociate_tags",
    help=unwrap(
        """Disassociate pruned datasets from the given tagged collections. May not be used with
        --purge."""
    ),
    multiple=True,
    callback=split_commas,
    metavar="TAG",
)


purge_option = MWOptionDecorator(
    "--purge",
    "purge_run",
    help=unwrap(
        """Completely remove the dataset from the given RUN in the Registry. May not be used with
        --disassociate. Note, this may remove provenance information from datasets other than those
        provided, and should be used with extreme care. RUN has to be provided for backward
        compatibility, but datasets will be removed from any RUN-type collections."""
    ),
    metavar="RUN",
)


find_all_option = MWOptionDecorator(
    "--find-all",
    is_flag=True,
    help=unwrap(
        """Purge the dataset results from all of the collections in which a dataset of that dataset
        type + data id combination appears. (By default only the first-found dataset type + data id
        is purged, according to the order of COLLECTIONS passed in.)"""
    ),
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap(
        """Remove these datasets from all datastores configured with this data repository. If
        --disassociate and --purge are not used then --unstore will be used by default. Note that
        --unstore will make it impossible to retrieve these datasets even via other collections.
        Datasets that are already not stored are ignored by this option."""
    ),
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap(
        """Display the datasets that would be removed but do not remove them.

        Note that a dataset can be in collections other than its RUN-type collection, and removing it
        will remove it from all of them, even though the only one this will show is its RUN
        collection."""
    ),
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed."""),
)


@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(
    help=unwrap(
        """COLLECTIONS is one or more expressions that identify the collections to
        search for datasets. Glob-style expressions may be used but only if the
        --find-all flag is also passed."""
    )
)
@option_section("Query Datasets Options:")
@datasets_option(
    help="One or more glob-style expressions that identify the dataset types to be pruned.",
    multiple=True,
    callback=split_commas,
)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs: Any) -> None:
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(message=pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(message=pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(message=pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(message=pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(message=pruneDatasets_errNoOp)
    if result.dryRun:
        assert result.action is not None, "Dry run results have not been set up properly."
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(text=pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            if result.onConfirmation:
                result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
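

# Illustrative invocation (a sketch; the collection name is hypothetical):
# preview what would be removed from the datastores without deleting anything.
#
#     $ butler prune-datasets myrepo u/me/processing-run --unstore --dry-run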


@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "collections to return."
)
@collection_type_option()
@click.option(
    "--chains",
    default="TREE",
    help="""Affects how results are presented:

    TABLE lists each collection in table form, with columns for collection
    name and type, and a column that lists the children of CHAINED collections
    (if any CHAINED collections are found).

    INVERSE-TABLE is like TABLE but instead of a column listing CHAINED
    collection children, it lists the parents of the collection if it is
    contained in any CHAINED collections.

    TREE recursively lists children below each CHAINED collection in tree
    form.

    INVERSE-TREE recursively lists parent collections below each collection in
    tree form.

    FLATTEN lists all collections, including children, in one list.

    [default: TREE]""",
    # Above, the default value is included in the help text, instead of using
    # show_default, so that the default is printed on its own line instead of
    # coming right after the FLATTEN text.
    callback=to_upper,
    type=click.Choice(
        choices=("TABLE", "INVERSE-TABLE", "TREE", "INVERSE-TREE", "FLATTEN"),
        case_sensitive=False,
    ),
)
@options_file_option()
def query_collections(*args: Any, **kwargs: Any) -> None:
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
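

# Illustrative invocation (a sketch; the glob pattern is hypothetical): list
# matching collections as a flat table rather than the default tree.
#
#     $ butler query-collections myrepo 'u/me/*' --chains TABLE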


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(
    help="GLOB is one or more glob-style expressions that fully or partially identify the "
    "dataset types to return."
)
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args: Any, **kwargs: Any) -> None:
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")
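

# Illustrative invocation (a sketch; the "--verbose" spelling is assumed to
# come from verbose_option(), and the glob pattern is hypothetical):
#
#     $ butler query-dataset-types myrepo 'calexp*' --verbose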


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset-type-name", nargs=-1)
def remove_dataset_type(*args: Any, **kwargs: Any) -> None:
    """Remove the dataset type definitions from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs: Any) -> None:
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")
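

# Illustrative invocation (a sketch; all query options are supplied by
# query_datasets_options() and are defined elsewhere):
#
#     $ butler query-datasets myrepo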


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("input-collection")
@click.argument("output-collection")
@click.argument("dataset-type-name")
@click.option(
    "--begin-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the beginning of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--end-date",
    type=str,
    default=None,
    help=unwrap(
        """ISO-8601 datetime (TAI) of the end of the validity range for the
        certified calibrations."""
    ),
)
@click.option(
    "--search-all-inputs",
    is_flag=True,
    default=False,
    help=unwrap(
        """Search all children of INPUT_COLLECTION if it is a CHAINED collection,
        instead of just the most recent one."""
    ),
)
@options_file_option()
def certify_calibrations(*args: Any, **kwargs: Any) -> None:
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)
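

# Illustrative invocation (a sketch; collection and dataset type names are
# hypothetical): certify biases from a processing run into a CALIBRATION
# collection with a validity range starting at the given TAI datetime.
#
#     $ butler certify-calibrations myrepo u/me/bias-run calib/bias bias \
#         --begin-date 2020-01-01T00:00:00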


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(
    help=unwrap(
        """DIMENSIONS are the keys of the data IDs to yield, such as exposure,
        instrument, or tract. Will be expanded to include any dependencies."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded data IDs. For example, including "raw" here would
        constrain the yielded "instrument", "exposure", "detector", and
        "physical_filter" values to only those for which at least one "raw" dataset
        exists in "collections". Requires --collections."""
    )
)
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@options_file_option()
def query_data_ids(**kwargs: Any) -> None:
    """List the data IDs in a repository."""
    table, reason = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if reason:
            print(reason)
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets; see --help for more information.")
        else:
            print("No results. Try --help for more information.")
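

# Illustrative invocation (a sketch; the collection name and WHERE clause are
# hypothetical, and the "--collections"/"--datasets"/"--where" spellings come
# from the shared option decorators):
#
#     $ butler query-data-ids myrepo exposure detector \
#         --datasets raw --collections HSC/raw/all --where "instrument = 'HSC'"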


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(
    help=unwrap(
        """An expression that fully or partially identifies dataset types that should
        constrain the yielded records. May only be used with
        --collections."""
    )
)
@collections_option(help=collections_option.help + " May only be used with --datasets.")
@where_option(help=where_help)
@order_by_option()
@limit_option()
@offset_option()
@click.option(
    "--no-check",
    is_flag=True,
    help=unwrap(
        """Don't check the query before execution. By default the query is checked before it
        is executed; this may reject some valid queries that resemble common mistakes."""
    ),
)
@options_file_option()
def query_dimension_records(**kwargs: Any) -> None:
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")
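

# Illustrative invocation (a sketch; the WHERE clause, instrument value, and
# record field are hypothetical):
#
#     $ butler query-dimension-records myrepo exposure \
#         --where "instrument = 'HSC' AND exposure.observation_type = 'science'"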


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option(
    "--preserve-path/--no-preserve-path",
    is_flag=True,
    default=True,
    help="Preserve the datastore path to the artifact at the destination.",
)
@click.option(
    "--clobber/--no-clobber",
    is_flag=True,
    default=False,
    help="If set, overwrite files that already exist locally.",
)
@options_file_option()
def retrieve_artifacts(**kwargs: Any) -> None:
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")
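

# Illustrative invocation (a sketch; the destination path is hypothetical):
# copy the selected artifacts into a local directory, overwriting any files
# already present there.
#
#     $ butler retrieve-artifacts myrepo ./artifacts --clobber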


@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@register_dataset_types_option()
@transfer_dimensions_option()
@options_file_option()
def transfer_datasets(**kwargs: Any) -> None:
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets to be
    transferred.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")
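

# Illustrative invocation (a sketch; repository URIs are hypothetical, and the
# "--register-dataset-types" spelling is assumed to come from
# register_dataset_types_option()):
#
#     $ butler transfer-datasets ./src-repo ./dest-repo --register-dataset-types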


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option(
    "--doc",
    default="",
    help="Documentation string associated with this collection. "
    "Only relevant if the collection is newly created.",
)
@click.option(
    "--flatten/--no-flatten",
    default=False,
    help="If `True`, recursively flatten out any nested chained collections in children first.",
)
@click.option(
    "--mode",
    type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
    default="redefine",
    help="Update mode: "
    "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
    "'remove': Modify the existing chain to remove the supplied CHILDREN. "
    "'pop': Pop a numbered element off the chain. Defaults to popping "
    "the first element (0). ``children`` must be integers if given. "
    "'prepend': Modify the existing chain to prepend the supplied CHILDREN to the front. "
    "'extend': Modify the existing chain to extend it with the supplied CHILDREN.",
)
def collection_chain(**kwargs: Any) -> None:
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on commas. The exact usage depends on the MODE option.
    For example,

    $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification:

    $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option(
    "--formatter",
    type=str,
    help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
    " will be determined from the dataset type and datastore configuration.",
)
@click.option(
    "--id-generation-mode",
    default="UNIQUE",
    help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
    " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
    " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
    " The latter is usually used for 'raw'-type data that will be ingested into multiple"
    " repositories.",
    callback=to_upper,
    type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False),
)
@click.option(
    "--data-id",
    type=str,
    multiple=True,
    callback=split_commas,
    help="Keyword=value string with an additional dataId value that is fixed for all ingested"
    " files. This can be used to simplify the table file by removing repeated entries that are"
    " fixed for all files to be ingested. Multiple key/values can be given either by using"
    " comma separation or multiple command line options.",
)
@click.option(
    "--prefix",
    type=str,
    help="For relative paths in the table file, specify a prefix to use. The default is to"
    " use the current working directory.",
)
@transfer_option()
def ingest_files(**kwargs: Any) -> None:
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

    file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. A relative file URI is by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
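

# Illustrative table file and invocation (a sketch; the dataset type, run,
# column names, and paths are all hypothetical). A CSV file readable by
# astropy.table might look like:
#
#     file,exposure,detector
#     data/img_000.fits,1000,0
#     data/img_001.fits,1000,1
#
# with a dimension value that is the same for every row supplied separately:
#
#     $ butler ingest-files myrepo my_dataset_type u/me/ingest-run files.csv \
#         --data-id instrument=MyCam --prefix /data/staging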


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option(
    "--is-calibration/--no-is-calibration",
    is_flag=True,
    default=False,
    help="Indicate that this dataset type can be part of a calibration collection.",
)
def register_dataset_type(**kwargs: Any) -> None:
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so cannot be defined by this command. Component
    dataset types are automatically derived from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
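

# Illustrative invocation (a sketch; the dataset type, storage class, and
# dimension names are examples only): register a dataset type with three
# dimensions.
#
#     $ butler register-dataset-type myrepo deepCoadd ExposureF tract patch band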


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@directory_argument(required=True, help="DIRECTORY is the folder to receive the exported calibrations.")
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
@dataset_type_option(help="Specific DatasetType(s) to export.", multiple=True)
@transfer_option()
def export_calibs(*args: Any, **kwargs: Any) -> None:
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")
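

# Illustrative invocation (a sketch; collection and dataset type names are
# hypothetical, and the "--dataset-type" and "--transfer" spellings are
# assumed to come from the shared option decorators):
#
#     $ butler export-calibs myrepo ./calib-export calib/bias --dataset-type bias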