Coverage for python/lsst/ctrl/mpexec/cli/script/qgraph.py: 38%
14 statements
« prev ^ index » next coverage.py v7.5.1, created at 2024-05-08 02:55 -0700
1# This file is part of ctrl_mpexec.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28import logging
29from types import SimpleNamespace
31from lsst.pipe.base.all_dimensions_quantum_graph_builder import DatasetQueryConstraintVariant
33from ... import CmdLineFwk
# Module-level logger for this script (not referenced in the code visible
# here; presumably kept for parity with sibling scripts — TODO confirm).
35_log = logging.getLogger(__name__)
def qgraph(  # type: ignore
    pipelineObj,
    qgraph,
    qgraph_id,
    qgraph_node_id,
    qgraph_datastore_records,
    skip_existing_in,
    skip_existing,
    save_qgraph,
    save_single_quanta,
    qgraph_dot,
    butler_config,
    input,
    output,
    output_run,
    extend_run,
    replace_run,
    prune_replaced,
    data_query,
    show,
    save_execution_butler,
    clobber_execution_butler,
    target_datastore_root,
    transfer,
    clobber_outputs,
    dataset_query_constraint,
    rebase,
    show_qgraph_header=False,
    mock=False,
    unmocked_dataset_types=(),
    mock_failure=(),
    **kwargs,
):
    """Implement the command line interface `pipetask qgraph` subcommand.

    Should only be called by command line tools and unit test code that test
    this function.

    Parameters
    ----------
    pipelineObj : `lsst.pipe.base.Pipeline` or None
        Pipeline object from which to build a quantum graph. Mutually
        exclusive with ``qgraph``: if this is not `None` then ``qgraph``
        must be `None`.
    qgraph : `str` or `None`
        URI of a serialized quantum graph definition (pickle file). Mutually
        exclusive with ``pipelineObj``: if this is not `None` then
        ``pipelineObj`` must be `None`.
    qgraph_id : `str` or `None`
        Quantum graph identifier; when a graph is loaded from a file it must
        match the identifier stored in that file. Ignored when the graph is
        not loaded from a file.
    qgraph_node_id : `list` of `int`, optional
        Restrict a file-loaded graph to this set of nodes, identified by
        integer IDs.
    qgraph_datastore_records : `bool`
        If `True`, include datastore records in the generated quanta.
    skip_existing_in : `list` [ `str` ]
        Collections to search for existing outputs; a Quantum whose outputs
        all already exist in these collections is excluded from the
        QuantumGraph.
    skip_existing : `bool`
        Appends the output RUN collection to the ``skip_existing_in`` list.
    save_qgraph : `str` or `None`
        URI at which to store the serialized quantum graph definition as a
        pickle file.
    save_single_quanta : `str` or `None`
        Format string of URIs for storing individual quantum graph
        definitions (pickle files); the curly braces ``{}`` are replaced by
        a quantum number.
    qgraph_dot : `str` or `None`
        Path at which to store a GraphViz DOT representation of the quantum
        graph.
    butler_config : `str`, `dict`, or `lsst.daf.butler.Config`
        If `str`, the path of the gen3 butler/registry config file. If
        `dict`, key-value pairs used to init or update a
        `lsst.daf.butler.Config` instance. If `Config`, the object used to
        configure a Butler directly.
    input : `list` [ `str` ]
        Names of the input collection(s).
    output : `str`
        Name of the output CHAINED collection. Either an existing CHAINED
        collection used as both input and output (when ``input`` is `None`)
        or a new CHAINED collection created to include all inputs (when
        ``input`` is not `None`). Either way its children start with an
        output RUN collection that directly holds all new datasets (see
        ``output_run``).
    output_run : `str`
        Name of the new output RUN collection. If not provided, ``output``
        must be given and a new RUN collection name is formed by appending a
        timestamp to it. If this collection already exists, ``extend_run``
        must be passed.
    extend_run : `bool`
        Instead of creating a new RUN collection, insert datasets into the
        one named by ``output_run`` (if provided) or the first child
        collection of ``output`` (which must be of type RUN).
    replace_run : `bool`
        Before creating a new RUN collection in an existing CHAINED
        collection, remove its first child collection (which must be of type
        RUN). Useful for repeatedly writing to the same parent collection
        during development; the replaced run's datasets are not deleted
        unless ``prune_replaced`` is also set. Requires ``output``, and
        ``extend_run`` must be `None`.
    prune_replaced : `str` or `None`
        If not `None`, delete the datasets in the collection replaced by
        ``replace_run`` — either just from the datastore ("unstore") or by
        removing them and the RUN entirely ("purge"). Requires
        ``replace_run`` to be `True`.
    data_query : `str`
        User query selection expression.
    show : `lsst.ctrl.mpexec.showInfo.ShowInfo`
        Descriptions of what to dump to stdout.
    save_execution_butler : `str` or `None`
        URI at which to store an execution Butler built from the
        QuantumGraph.
    clobber_execution_butler : `bool`
        If `True`, overwrite existing execution butler files if present.
    target_datastore_root : `str` or `None`
        URI of the execution butler's datastore.
    transfer : `str` or `None`
        Transfer mode for execution butler creation; a ``transfer`` string
        recognized by :func:`lsst.resources.ResourcePath.transfer_from`.
    clobber_outputs : `bool`
        Remove outputs from a previous execution of the same quantum before
        a new execution. When ``skip_existing`` is also passed, only failed
        quanta are clobbered.
    dataset_query_constraint : `str`
        Controls constraining graph building with pre-existing dataset
        types. Valid values are ``off``, ``all``, or a comma-separated list
        of dataset type names.
    rebase : `bool`
        If `True`, reset the output collection chain when it is inconsistent
        with the ``inputs``.
    show_qgraph_header : bool, optional
        Whether to print the QuantumGraph's headerData to the terminal.
        Defaults to `False`.
    mock : `bool`, optional
        If `True`, use a mocked version of the pipeline.
    unmocked_dataset_types : `collections.abc.Sequence` [ `str` ], optional
        Overall-input dataset types that should not be mocked.
    mock_failure : `~collections.abc.Sequence`, optional
        Quanta that should raise exceptions.
    **kwargs : `dict` [`str`, `str`]
        Ignored; click commands may accept options for more than one script
        function and pass all the option kwargs to each of the script
        functions which ignore these unused kwargs.

    Returns
    -------
    qgraph : `lsst.pipe.base.QuantumGraph`
        The qgraph object that was created.
    """
    # Translate the user's constraint expression into its variant object.
    constraint = DatasetQueryConstraintVariant.fromExpression(dataset_query_constraint)

    # CmdLineFwk.makeGraph expects an argparse-style namespace; assemble the
    # attribute mapping explicitly.  Note one rename: the script argument
    # ``save_execution_butler`` is carried as ``execution_butler_location``.
    attributes = {
        "qgraph": qgraph,
        "qgraph_id": qgraph_id,
        "qgraph_node_id": qgraph_node_id,
        "qgraph_datastore_records": qgraph_datastore_records,
        "save_qgraph": save_qgraph,
        "save_single_quanta": save_single_quanta,
        "qgraph_dot": qgraph_dot,
        "butler_config": butler_config,
        "input": input,
        "output": output,
        "output_run": output_run,
        "extend_run": extend_run,
        "replace_run": replace_run,
        "prune_replaced": prune_replaced,
        "data_query": data_query,
        "skip_existing_in": skip_existing_in,
        "skip_existing": skip_existing,
        "execution_butler_location": save_execution_butler,
        "clobber_execution_butler": clobber_execution_butler,
        "target_datastore_root": target_datastore_root,
        "transfer": transfer,
        "clobber_outputs": clobber_outputs,
        "dataset_query_constraint": constraint,
        "rebase": rebase,
        "show_qgraph_header": show_qgraph_header,
        "mock": mock,
        "unmocked_dataset_types": list(unmocked_dataset_types),
        "mock_failure": mock_failure,
    }
    args = SimpleNamespace(**attributes)

    framework = CmdLineFwk()
    graph = framework.makeGraph(pipelineObj, args)
    if graph is None:
        return None

    # Optionally dump requested diagnostics to stdout.
    show.show_graph_info(graph, args)
    return graph