Coverage for python/lsst/daf/butler/cli/cmd/commands.py : 55%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

__all__ = ()

import click

from ..opt import (
    collection_type_option,
    collection_argument,
    collections_argument,
    collections_option,
    components_option,
    confirm_option,
    dataset_type_option,
    datasets_option,
    destination_argument,
    dimensions_argument,
    directory_argument,
    element_argument,
    glob_argument,
    options_file_option,
    query_datasets_options,
    repo_argument,
    transfer_option,
    verbose_option,
    where_option,
)

from ..utils import (
    ButlerCommand,
    MWOptionDecorator,
    option_section,
    printAstropyTables,
    split_commas,
    to_upper,
    typeStrAcceptsMultiple,
    unwrap,
    where_help,
)

from ... import script


willCreateRepoHelp = "REPO is the URI or path to the new repository. Will be created if it does not exist."
existingRepoHelp = "REPO is the URI or path to an existing data repository root or configuration file."


@click.command(cls=ButlerCommand, short_help="Add existing datasets to a tagged collection.")
@repo_argument(required=True)
@collection_argument(help="COLLECTION is the collection the datasets should be associated with.")
@query_datasets_options(repo=False, showUri=False, useArguments=False)
@options_file_option()
def associate(**kwargs):
    """Add existing datasets to a tagged collection; searches for datasets
    matching the query options and adds them to the named COLLECTION.
    """
    script.associate(**kwargs)
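

# Illustrative usage of `associate` (the collection and dataset type names
# are hypothetical; --collections and --datasets are assumed to be among the
# options added by query_datasets_options):
#
#   $ butler associate REPO u/user/tagged --collections u/user/run1 \
#         --datasets calexp
#
# tags every calexp dataset found in u/user/run1 into u/user/tagged.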


# The conversion from the import command name to the butler_import function
# name for subcommand lookup is implemented in cli/butler.py, in
# funcNameToCmdName and cmdNameToFuncName. If name changes are made here they
# must be reflected in that location. If this becomes a common pattern a
# better mechanism should be implemented.
@click.command("import", cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@directory_argument(required=True)
@transfer_option()
@click.option("--export-file",
              help="Name for the file that contains database information associated with the exported "
                   "datasets. If this is not an absolute path, does not exist in the current working "
                   "directory, and --dir is provided, it is assumed to be in that directory. Defaults "
                   "to \"export.yaml\".",
              type=click.File("r"))
@click.option("--skip-dimensions", "-s", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Dimensions that should be skipped during import.")
@click.option("--reuse-ids", is_flag=True, help="Force re-use of imported dataset IDs for integer IDs.")
@options_file_option()
def butler_import(*args, **kwargs):
    """Import data into a butler repository."""
    script.butlerImport(*args, **kwargs)
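

# Illustrative usage of `import` (paths are hypothetical):
#
#   $ butler import REPO /path/to/exported/data --export-file export.yaml \
#         --transfer symlink
#
# imports the datasets described by export.yaml into REPO, symlinking the
# file artifacts rather than copying them.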


@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=willCreateRepoHelp)
@click.option("--seed-config", help="Path to an existing YAML config file to apply (on top of defaults).")
@click.option("--dimension-config", help="Path to an existing YAML config file with dimension configuration.")
@click.option("--standalone", is_flag=True, help="Include all defaults in the config file in the repo, "
              "insulating the repo from changes in package defaults.")
@click.option("--override", is_flag=True, help="Allow values in the supplied config to override all "
              "repo settings.")
@click.option("--outfile", "-f", default=None, type=str, help="Name of output file to receive repository "
              "configuration. Default is to write butler.yaml into the specified repo.")
@options_file_option()
def create(*args, **kwargs):
    """Create an empty Gen3 Butler repository."""
    script.createRepo(*args, **kwargs)


@click.command(short_help="Dump butler config to stdout.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--subset", "-s", type=str,
              help="Subset of a configuration to report. This can be any key in the hierarchy such as "
                   "'.datastore.root' where the leading '.' specifies the delimiter for the hierarchy.")
@click.option("--searchpath", "-p", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="Additional search paths to use for configuration overrides.")
@click.option("--file", "outfile", type=click.File("w"), default="-",
              help="Print the (possibly-expanded) configuration for a repository to a file, or to stdout "
                   "by default.")
@options_file_option()
def config_dump(*args, **kwargs):
    """Dump either a subset or the full Butler configuration to standard output."""
    script.configDump(*args, **kwargs)
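

# Illustrative usage of `config-dump`:
#
#   $ butler config-dump REPO --subset .datastore
#
# prints just the datastore branch of the expanded repository configuration.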


@click.command(short_help="Validate the configuration files.", cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@click.option("--quiet", "-q", is_flag=True, help="Do not report individual failures.")
@dataset_type_option(help="Specific DatasetType(s) to validate.", multiple=True)
@click.option("--ignore", "-i", type=str, multiple=True, callback=split_commas,
              metavar=typeStrAcceptsMultiple,
              help="DatasetType(s) to ignore for validation.")
@options_file_option()
def config_validate(*args, **kwargs):
    """Validate the configuration files for a Gen3 Butler repository."""
    is_good = script.configValidate(*args, **kwargs)
    if not is_good:
        raise click.exceptions.Exit(1)


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@collection_argument(help=unwrap("""COLLECTION is the name of the collection to remove. If this is a tagged
                                 or chained collection, datasets within the collection are not modified
                                 unless --unstore is passed. If this is a run collection, --purge and
                                 --unstore must be passed, and all datasets in it are fully removed from the
                                 data repository."""))
@click.option("--purge",
              help=unwrap("""Permit RUN collections to be removed, fully removing datasets within them.
                          Requires --unstore as an added precaution against accidental deletion. Must not be
                          passed if the collection is not a RUN."""),
              is_flag=True)
@click.option("--unstore",
              help="Remove all datasets in the collection from all datastores in which they appear.",
              is_flag=True)
@click.option("--unlink",
              help="Before removing the given COLLECTION, unlink it from this parent collection.",
              multiple=True,
              callback=split_commas)
@confirm_option()
@options_file_option()
def prune_collection(**kwargs):
    """Remove a collection and possibly prune datasets within it."""
    result = script.pruneCollection(**kwargs)
    if result.confirm:
        print("The following collections will be removed:")
        result.removeTable.pprint_all(align="<")
        doContinue = click.confirm("Continue?", default=False)
    else:
        doContinue = True
    if doContinue:
        result.onConfirmation()
        print("Removed collections.")
    else:
        print("Aborted.")
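

# Illustrative usage of `prune-collection` (the collection name is
# hypothetical):
#
#   $ butler prune-collection REPO u/user/run1 --purge --unstore
#
# fully removes the RUN collection u/user/run1, deleting its datasets from
# the registry and all datastores after a confirmation prompt.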


pruneDatasets_wouldRemoveMsg = unwrap("""The following datasets will be removed from any datastores in which
                                      they are present:""")
pruneDatasets_wouldDisassociateMsg = unwrap("""The following datasets will be disassociated from
                                            {collections} if they are currently present in it (which is not
                                            checked):""")
pruneDatasets_wouldDisassociateAndRemoveMsg = unwrap("""The following datasets will be disassociated from
                                                     {collections} if they are currently present in it (which
                                                     is not checked), and removed from any datastores in
                                                     which they are present.""")
pruneDatasets_willRemoveMsg = "The following datasets will be removed:"
pruneDatasets_askContinueMsg = "Continue?"
pruneDatasets_didRemoveAforementioned = "The datasets were removed."
pruneDatasets_didNotRemoveAforementioned = "Did not remove the datasets."
pruneDatasets_didRemoveMsg = "Removed the following datasets:"
pruneDatasets_noDatasetsFound = "Did not find any datasets."
pruneDatasets_errPurgeAndDisassociate = unwrap(
    """--disassociate and --purge may not be used together: --disassociate purges from just the passed TAGged
    collections, but --purge forces disassociation from all of them."""
)
pruneDatasets_errQuietWithDryRun = "Can not use --quiet and --dry-run together."
pruneDatasets_errNoCollectionRestriction = unwrap(
    """Must indicate collections from which to prune datasets by passing COLLECTIONS arguments (select all
    collections by passing '*', or consider using 'butler prune-collection'), by using --purge to pass a run
    collection, or by using --disassociate to select a tagged collection.""")
pruneDatasets_errPruneOnNotRun = "Can not prune a collection that is not a RUN collection: {collection}"
pruneDatasets_errNoOp = "No operation: one of --purge, --unstore, or --disassociate must be provided."


disassociate_option = MWOptionDecorator(
    "--disassociate", "disassociate_tags",
    help=unwrap("""Disassociate pruned datasets from the given tagged collections. May not be used with
                --purge."""),
    multiple=True,
    callback=split_commas,
    metavar="TAG"
)


purge_option = MWOptionDecorator(
    "--purge", "purge_run",
    help=unwrap("""Completely remove the dataset from the given RUN in the Registry. May not be used with
                --disassociate. Note that this may remove provenance information from datasets other than
                those provided, and should be used with extreme care."""),
    metavar="RUN"
)


find_all_option = MWOptionDecorator(
    "--find-all", is_flag=True,
    help=unwrap("""Purge the dataset results from all of the collections in which a dataset of that dataset
                type + data id combination appears. (By default only the first found dataset type + data id
                is purged, according to the order of COLLECTIONS passed in.)""")
)


unstore_option = MWOptionDecorator(
    "--unstore",
    is_flag=True,
    help=unwrap("""Remove these datasets from all datastores configured with this data repository. If
                --disassociate and --purge are not used then --unstore will be used by default. Note that
                --unstore will make it impossible to retrieve these datasets even via other collections.
                Datasets that are already not stored are ignored by this option.""")
)


dry_run_option = MWOptionDecorator(
    "--dry-run",
    is_flag=True,
    help=unwrap("""Display the datasets that would be removed but do not remove them.

                Note that a dataset can be in collections other than its RUN-type collection, and removing it
                will remove it from all of them, even though the only one this will show is its RUN
                collection.""")
)


quiet_option = MWOptionDecorator(
    "--quiet",
    is_flag=True,
    help=unwrap("""Makes output quiet. Implies --no-confirm. Requires --dry-run not be passed.""")
)


@click.command(cls=ButlerCommand, short_help="Remove datasets.")
@repo_argument(required=True)
@collections_argument(help=unwrap("""COLLECTIONS is one or more expressions that identify the collections to
                                  search for datasets. Glob-style expressions may be used but only if the
                                  --find-all flag is also passed."""))
@option_section("Query Datasets Options:")
@datasets_option(help="One or more glob-style expressions that identify the dataset types to be pruned.",
                 multiple=True,
                 callback=split_commas)
@find_all_option()
@where_option(help=where_help)
@option_section("Prune Options:")
@disassociate_option()
@purge_option()
@unstore_option()
@option_section("Execution Options:")
@dry_run_option()
@confirm_option()
@quiet_option()
@option_section("Other Options:")
@options_file_option()
def prune_datasets(**kwargs):
    """Query for and remove one or more datasets from a collection and/or
    storage.
    """
    quiet = kwargs.pop("quiet", False)
    if quiet:
        if kwargs["dry_run"]:
            raise click.ClickException(pruneDatasets_errQuietWithDryRun)
        kwargs["confirm"] = False

    result = script.pruneDatasets(**kwargs)

    if result.errPurgeAndDisassociate:
        raise click.ClickException(pruneDatasets_errPurgeAndDisassociate)
    if result.errNoCollectionRestriction:
        raise click.ClickException(pruneDatasets_errNoCollectionRestriction)
    if result.errPruneOnNotRun:
        raise click.ClickException(pruneDatasets_errPruneOnNotRun.format(**result.errDict))
    if result.errNoOp:
        raise click.ClickException(pruneDatasets_errNoOp)
    if result.dryRun:
        if result.action["disassociate"] and result.action["unstore"]:
            msg = pruneDatasets_wouldDisassociateAndRemoveMsg
        elif result.action["disassociate"]:
            msg = pruneDatasets_wouldDisassociateMsg
        else:
            msg = pruneDatasets_wouldRemoveMsg
        print(msg.format(**result.action))
        printAstropyTables(result.tables)
        return
    if result.confirm:
        if not result.tables:
            print(pruneDatasets_noDatasetsFound)
            return
        print(pruneDatasets_willRemoveMsg)
        printAstropyTables(result.tables)
        doContinue = click.confirm(pruneDatasets_askContinueMsg, default=False)
        if doContinue:
            result.onConfirmation()
            print(pruneDatasets_didRemoveAforementioned)
        else:
            print(pruneDatasets_didNotRemoveAforementioned)
        return
    if result.finished:
        if not quiet:
            print(pruneDatasets_didRemoveMsg)
            printAstropyTables(result.tables)
        return
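

# Illustrative usage of `prune-datasets` (collection names are hypothetical):
#
#   $ butler prune-datasets REPO u/user/run1 --purge u/user/run1 --unstore
#
# queries u/user/run1 for datasets, shows them, and on confirmation removes
# them from the registry and all datastores.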


@click.command(short_help="Search for collections.", cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "collections to return.")
@collection_type_option()
@click.option("--chains",
              default="table",
              help=unwrap("""Affects how results are presented. TABLE lists each collection in a row, with
                          chained collections' children listed in a Definition column. TREE lists children
                          below their parent in tree form. FLATTEN lists all collections, including child
                          collections, in one list. Defaults to TABLE."""),
              callback=to_upper,
              type=click.Choice(("TABLE", "TREE", "FLATTEN"), case_sensitive=False))
@options_file_option()
def query_collections(*args, **kwargs):
    """Get the collections whose names match an expression."""
    table = script.queryCollections(*args, **kwargs)
    # The unit test that mocks script.queryCollections does not return a table
    # so we need the following `if`.
    if table:
        # When chains==TREE, the children of chained collections are indented
        # relative to their parents. For this to work properly the table must
        # be left-aligned.
        table.pprint_all(align="<")
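

# Illustrative usage of `query-collections` (the glob is hypothetical):
#
#   $ butler query-collections REPO "HSC/*" --chains tree
#
# lists the matching collections, indenting the children of chained
# collections below their parents.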


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@glob_argument(help="GLOB is one or more glob-style expressions that fully or partially identify the "
                    "dataset types to return.")
@verbose_option(help="Include dataset type name, dimensions, and storage class in output.")
@components_option()
@options_file_option()
def query_dataset_types(*args, **kwargs):
    """Get the dataset types in a repository."""
    table = script.queryDatasetTypes(*args, **kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('dataset-type-name', nargs=1)
def remove_dataset_type(*args, **kwargs):
    """Remove a dataset type definition from a repository."""
    script.removeDatasetType(*args, **kwargs)


@click.command(cls=ButlerCommand)
@query_datasets_options()
@options_file_option()
def query_datasets(**kwargs):
    """List the datasets in a repository."""
    for table in script.QueryDatasets(**kwargs).getTables():
        print("")
        table.pprint_all()
        print("")
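

# Illustrative usage of `query-datasets` (names are hypothetical; the GLOB
# argument and --collections option are assumed to come from
# query_datasets_options):
#
#   $ butler query-datasets REPO "calexp*" --collections u/user/run1
#
# prints one table of matching datasets per dataset type found.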


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument('input-collection')
@click.argument('output-collection')
@click.argument('dataset-type-name')
@click.option("--begin-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the beginning of the validity range for the
                          certified calibrations."""))
@click.option("--end-date", type=str, default=None,
              help=unwrap("""ISO-8601 datetime (TAI) of the end of the validity range for the
                          certified calibrations."""))
@click.option("--search-all-inputs", is_flag=True, default=False,
              help=unwrap("""Search all children of the inputCollection if it is a CHAINED collection,
                          instead of just the most recent one."""))
@options_file_option()
def certify_calibrations(*args, **kwargs):
    """Certify calibrations in a repository."""
    script.certifyCalibrations(*args, **kwargs)


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@dimensions_argument(help=unwrap("""DIMENSIONS are the keys of the data IDs to yield, such as exposure,
                                 instrument, or tract. Will be expanded to include any dependencies."""))
@collections_option()
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded data IDs. For example, including "raw" here would
                             constrain the yielded "instrument", "exposure", "detector", and
                             "physical_filter" values to only those for which at least one "raw" dataset
                             exists in "collections"."""))
@where_option(help=where_help)
@options_file_option()
def query_data_ids(**kwargs):
    """List the data IDs in a repository."""
    table = script.queryDataIds(**kwargs)
    if table:
        table.pprint_all()
    else:
        if not kwargs.get("dimensions") and not kwargs.get("datasets"):
            print("No results. Try requesting some dimensions or datasets, see --help for more information.")
        else:
            print("No results. Try --help for more information.")
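

# Illustrative usage of `query-data-ids` (the collection name is
# hypothetical):
#
#   $ butler query-data-ids REPO exposure detector --datasets raw \
#         --collections HSC/raw/all
#
# lists the exposure+detector data IDs for which at least one raw dataset
# exists in HSC/raw/all.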


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@element_argument(required=True)
@datasets_option(help=unwrap("""An expression that fully or partially identifies dataset types that should
                             constrain the yielded records. Only affects results when used with
                             --collections."""))
@collections_option(help=collections_option.help + " Only affects results when used with --datasets.")
@where_option(help=where_help)
@click.option("--no-check", is_flag=True,
              help=unwrap("""Don't check the query before execution. By default the query is checked before
                          it is executed; this check may reject some valid queries that resemble common
                          mistakes."""))
@options_file_option()
def query_dimension_records(**kwargs):
    """Query for dimension information."""
    table = script.queryDimensionRecords(**kwargs)
    if table:
        table.pprint_all()
    else:
        print("No results. Try --help for more information.")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@destination_argument(help="Destination URI of folder to receive file artifacts.")
@transfer_option()
@verbose_option(help="Report destination location of all transferred artifacts.")
@click.option("--preserve-path/--no-preserve-path", is_flag=True, default=True,
              help="Preserve the datastore path to the artifact at the destination.")
@click.option("--clobber/--no-clobber", is_flag=True, default=False,
              help="If clobber, overwrite files if they exist locally.")
@options_file_option()
def retrieve_artifacts(**kwargs):
    """Retrieve file artifacts associated with datasets in a repository."""
    verbose = kwargs.pop("verbose")
    transferred = script.retrieveArtifacts(**kwargs)
    if verbose and transferred:
        print(f"Transferred the following to {kwargs['destination']}:")
        for uri in transferred:
            print(uri)
        print()
    print(f"Number of artifacts retrieved into destination {kwargs['destination']}: {len(transferred)}")
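

# Illustrative usage of `retrieve-artifacts` (names are hypothetical):
#
#   $ butler retrieve-artifacts REPO /tmp/artifacts --transfer copy \
#         --collections u/user/run1 --no-preserve-path
#
# copies the matching file artifacts into /tmp/artifacts without reproducing
# the datastore directory structure.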


@click.command(cls=ButlerCommand)
@click.argument("source", required=True)
@click.argument("dest", required=True)
@query_datasets_options(showUri=False, useArguments=False, repo=False)
@transfer_option()
@options_file_option()
def transfer_datasets(**kwargs):
    """Transfer datasets from a source butler to a destination butler.

    SOURCE is a URI to the Butler repository containing the datasets to
    transfer.

    DEST is a URI to the Butler repository that will receive copies of the
    datasets.
    """
    number = script.transferDatasets(**kwargs)
    print(f"Number of datasets transferred: {number}")
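

# Illustrative usage of `transfer-datasets` (URIs and names are hypothetical):
#
#   $ butler transfer-datasets /path/to/src-repo /path/to/dest-repo \
#         --collections u/user/run1 --transfer copy
#
# copies every dataset found in u/user/run1 from the source repository to the
# destination repository.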


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("parent", required=True, nargs=1)
@click.argument("children", required=False, nargs=-1, callback=split_commas)
@click.option("--doc", default="",
              help="Documentation string associated with this collection. "
                   "Only relevant if the collection is newly created.")
@click.option("--flatten/--no-flatten", default=False,
              help="If --flatten, recursively flatten out any nested chained collections in children first.")
@click.option("--mode",
              type=click.Choice(["redefine", "extend", "remove", "prepend", "pop"]),
              default="redefine",
              help="Update mode: "
                   "'redefine': Create a new chain or redefine an existing chain with the supplied CHILDREN. "
                   "'remove': Modify existing chain to remove the supplied CHILDREN. "
                   "'pop': Pop a numbered element off the chain. Defaults to popping "
                   "the first element (0). ``children`` must be integers if given. "
                   "'prepend': Modify existing chain to prepend the supplied CHILDREN to the front. "
                   "'extend': Modify existing chain to extend it with the supplied CHILDREN.")
def collection_chain(**kwargs):
    """Define a collection chain.

    PARENT is the name of the chained collection to create or modify. If the
    collection already exists the chain associated with it will be updated.

    CHILDREN are the collections to be used to modify the chain. The supplied
    values will be split on comma. The exact usage depends on the MODE option.
    For example,

        $ butler collection-chain REPO PARENT child1,child2 child3

    will result in three children being included in the chain.

    When the MODE is 'pop' the CHILDREN should be integer indices indicating
    collections to be removed from the current chain.
    MODE 'pop' can take negative integers to indicate removal relative to the
    end of the chain, but when doing that '--' must be given to indicate the
    end of the options specification.

        $ butler collection-chain REPO --mode=pop PARENT -- -1

    will remove the final collection from the chain.
    """
    chain = script.collectionChain(**kwargs)
    print(f"[{', '.join(chain)}]")


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("run", required=True)
@click.argument("table_file", required=True)
@click.option("--formatter", type=str,
              help="Fully-qualified python class to use as the Formatter. If not specified the formatter"
                   " will be determined from the dataset type and datastore configuration.")
@click.option("--id-generation-mode",
              default="UNIQUE",
              help="Mode to use for generating dataset IDs. The default creates a unique ID. Other options"
                   " are: 'DATAID_TYPE' for creating a reproducible ID from the dataId and dataset type;"
                   " 'DATAID_TYPE_RUN' for creating a reproducible ID from the dataId, dataset type, and run."
                   " The latter is usually used for 'raw'-type data that will be ingested in multiple"
                   " repositories.",
              callback=to_upper,
              type=click.Choice(("UNIQUE", "DATAID_TYPE", "DATAID_TYPE_RUN"), case_sensitive=False))
@click.option("--data-id",
              type=str,
              multiple=True, callback=split_commas,
              help="Keyword=value string with an additional dataId value that is fixed for all ingested"
                   " files. This can be used to simplify the table file by removing repeated entries that are"
                   " fixed for all files to be ingested. Multiple key/values can be given either by using"
                   " comma separation or multiple command line options.")
@click.option("--prefix",
              type=str,
              help="For relative paths in the table file, specify a prefix to use. The default is to"
                   " use the current working directory.")
@transfer_option()
def ingest_files(**kwargs):
    """Ingest files from a table file.

    DATASET_TYPE is the name of the dataset type to be associated with these
    files. This dataset type must already exist and will not be created by
    this command. There can only be one dataset type per invocation of this
    command.

    RUN is the run to use for the file ingest.

    TABLE_FILE refers to a file that can be read by astropy.table with
    columns of:

        file URI, dimension1, dimension2, ..., dimensionN

    where the first column is the URI to the file to be ingested and the
    remaining columns define the dataId to associate with that file.
    The column names should match the dimensions for the specified dataset
    type. Relative file URIs are by default assumed to be relative to the
    current working directory, but this can be overridden using the
    ``--prefix`` option.

    This command does not create dimension records, so any records must
    be created by other means. This command should not be used to ingest
    raw camera exposures.
    """
    script.ingest_files(**kwargs)
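

# Illustrative table file for `ingest-files`, assuming a dataset type whose
# dimensions are instrument, exposure, and detector (all names here are
# hypothetical):
#
#   file,instrument,exposure,detector
#   image_00001_00.fits,HSC,1,0
#   image_00001_01.fits,HSC,1,1
#
# which could be ingested with:
#
#   $ butler ingest-files --prefix /data/staging REPO image u/user/ingest \
#         files.csv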


@click.command(cls=ButlerCommand)
@repo_argument(required=True)
@click.argument("dataset_type", required=True)
@click.argument("storage_class", required=True)
@click.argument("dimensions", required=False, nargs=-1)
@click.option("--is-calibration/--no-is-calibration", is_flag=True, default=False,
              help="Indicate that this dataset type can be part of a calibration collection.")
def register_dataset_type(**kwargs):
    """Register a new dataset type with this butler repository.

    DATASET_TYPE is the name of the dataset type.

    STORAGE_CLASS is the name of the StorageClass to be associated with
    this dataset type.

    DIMENSIONS is a list of all the dimensions relevant to this
    dataset type. It can be an empty list.

    A component dataset type (such as "something.component") is not a
    real dataset type and so cannot be defined by this command. Component
    dataset types are automatically derived from the composite dataset type
    when a composite storage class is specified.
    """
    inserted = script.register_dataset_type(**kwargs)
    if inserted:
        print("Dataset type successfully registered.")
    else:
        print("Dataset type already existed in identical form.")
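

# Illustrative usage of `register-dataset-type` (the storage class and
# dimension names are examples, not an exhaustive list):
#
#   $ butler register-dataset-type REPO myCoadd ExposureF tract patch band
#
# registers a dataset type "myCoadd" with storage class ExposureF and
# dimensions tract, patch, and band.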


@click.command(cls=ButlerCommand)
@repo_argument(required=True, help=existingRepoHelp)
@directory_argument(required=True)
@collections_argument(help="COLLECTIONS are the collections to export calibrations from.")
def export_calibs(*args, **kwargs):
    """Export calibrations from the butler for import elsewhere."""
    table = script.exportCalibs(*args, **kwargs)
    if table:
        table.pprint_all(align="<")