# This file is part of verify.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

__all__ = ["make_test_butler", "make_dataset_type", "ref_from_connection",
           "run_quantum"]

from lsst.daf.butler import Butler, DatasetType, DataCoordinate, DatasetRef
from lsst.pipe.base import ButlerQuantumContext


# TODO: factor this out into a pipeline testing library
def make_test_butler(root, data_ids):
    """Create an empty repository with default configuration.

    Parameters
    ----------
    root : `str`
        The location of the root directory for the repository.
    data_ids : `dict` [`str`, `iterable` [`dict`]]
        A dictionary keyed by the dimensions used in the test. Each value
        is an iterable of dictionaries, each giving the fields and values
        for one record of that dimension. See
        :file:`daf/butler/config/dimensions.yaml` for the required fields,
        listed as "keys" and "requires" under each dimension's entry.

    Returns
    -------
    butler : `lsst.daf.butler.Butler`
        A Butler referring to the new repository.
    """
    # TODO: takes 5 seconds to run; split up into class-level Butler
    # with test-level runs after DM-21246
    Butler.makeRepo(root)
    butler = Butler(root, run="test")
    for dimension, values in data_ids.items():
        butler.registry.insertDimensionData(dimension, *values)
    return butler
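

# Illustrative sketch (not exercised on import): the dimension names and
# record fields below are assumptions about the default dimension universe,
# in which ``detector`` records require an ``instrument`` plus ``id`` and
# ``full_name`` fields; adapt them to whatever dimensions a test needs.
def _example_make_test_butler(tmp_path):
    data_ids = {
        "instrument": [{"name": "notACam"}],
        "detector": [{"instrument": "notACam", "id": 101,
                      "full_name": "101"}],
    }
    return make_test_butler(str(tmp_path), data_ids)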


def make_dataset_type(butler, name, dimensions, storageClass):
    """Create a dataset type in a particular repository.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The repository to update.
    name : `str`
        The name of the dataset type.
    dimensions : `set` [`str`]
        The dimensions of the new dataset type.
    storageClass : `str`
        The storage class the dataset will use.

    Returns
    -------
    dataset_type : `lsst.daf.butler.DatasetType`
        The new type.

    Raises
    ------
    ValueError
        Raised if the dimensions or storage class are invalid.
    ConflictingDefinitionError
        Raised if another dataset type with the same name already exists.
    """
    dataset_type = DatasetType(name, dimensions, storageClass,
                               universe=butler.registry.dimensions)
    butler.registry.registerDatasetType(dataset_type)
    return dataset_type
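

# Illustrative sketch: register a minimal per-detector dataset type on a
# test repository. The dataset type name and the "StructuredDataDict"
# storage class are assumptions chosen for the example, not requirements
# of the helper.
def _example_make_dataset_type(butler):
    return make_dataset_type(butler, "demo_metadata",
                             {"instrument", "detector"},
                             "StructuredDataDict")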


def ref_from_connection(butler, connection, data_id):
    """Create a DatasetRef for a connection in a collection.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        The collection to point to.
    connection : `lsst.pipe.base.connectionTypes.DimensionedConnection`
        The connection defining the dataset type to point to.
    data_id : `Mapping` [`str`] or `lsst.daf.butler.DataCoordinate`
        The data ID for the dataset to point to.

    Returns
    -------
    ref : `lsst.daf.butler.DatasetRef`
        A reference to a dataset compatible with ``connection``, with ID
        ``data_id``, in the collection pointed to by ``butler``.
    """
    universe = butler.registry.dimensions
    data_id = DataCoordinate.standardize(data_id, universe=universe)
    return DatasetRef(
        datasetType=connection.makeDatasetType(universe),
        dataId=data_id,
    )
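

# Illustrative sketch: turn one of a task's output connections into a ref
# that can be checked after the task has run. The ``measurement`` connection
# name and the data ID values are assumptions about the task and repository
# under test.
def _example_ref_from_connection(butler, task):
    connections = task.config.ConnectionsClass(config=task.config)
    return ref_from_connection(
        butler,
        connections.measurement,
        {"instrument": "notACam", "detector": 101},
    )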


def run_quantum(task, butler, quantum):
    """Run a PipelineTask on a Quantum.

    Parameters
    ----------
    task : `lsst.pipe.base.PipelineTask`
        The task to run on the quantum.
    butler : `lsst.daf.butler.Butler`
        The collection to run on.
    quantum : `lsst.daf.butler.Quantum`
        The quantum to run.
    """
    butler_qc = ButlerQuantumContext(butler, quantum)
    connections = task.config.ConnectionsClass(config=task.config)
    input_refs, output_refs = connections.buildDatasetRefs(quantum)
    task.runQuantum(butler_qc, input_refs, output_refs)
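

# Illustrative end-to-end sketch tying the helpers together. The Quantum
# keyword arguments used here (dataId, inputs, outputs) are an assumption
# about the daf_butler version in use, and the ``cat`` and ``meas``
# connection names are assumptions about the task under test; the input
# and output mappings are keyed by dataset type, with a list of refs per
# type.
def _example_run_quantum(task, butler, data_id):
    from lsst.daf.butler import Quantum

    connections = task.config.ConnectionsClass(config=task.config)
    input_ref = ref_from_connection(butler, connections.cat, data_id)
    output_ref = ref_from_connection(butler, connections.meas, data_id)
    quantum = Quantum(
        dataId=input_ref.dataId,
        inputs={input_ref.datasetType: [input_ref]},
        outputs={output_ref.datasetType: [output_ref]},
    )
    run_quantum(task, butler, quantum)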