Coverage for python/lsst/ctrl/bps/bps_utils.py: 26%
83 statements
coverage.py v6.5.0, created at 2022-11-19 10:08 +0000

# This file is part of ctrl_bps.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Misc supporting classes and functions for BPS.
"""

__all__ = [
    "chdir",
    "create_job_quantum_graph_filename",
    "save_qg_subgraph",
    "_create_execution_butler",
    "create_count_summary",
    "parse_count_summary",
    "_dump_pkg_info",
    "_dump_env_info",
]

import contextlib
import dataclasses
import logging
import os
import shlex
import subprocess
from collections import Counter
from enum import Enum
from pathlib import Path

import yaml
from lsst.utils.packages import Packages

_LOG = logging.getLogger(__name__)


class WhenToSaveQuantumGraphs(Enum):
    """Values for when to save the job quantum graphs."""

    QGRAPH = 1  # Must be using single_quantum_clustering algorithm.
    TRANSFORM = 2
    PREPARE = 3
    SUBMIT = 4
    NEVER = 5  # Always use full QuantumGraph.


@contextlib.contextmanager
def chdir(path):
    """Temporarily change the current working directory within a context.

    Parameters
    ----------
    path : `str` or `pathlib.Path`
        Path to be made the current working directory.
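
    Examples
    --------
    A minimal usage sketch; the directory name is illustrative::

        with chdir("submit/u/someuser/run1"):
            ...  # work relative to submit/u/someuser/run1 here

    The original working directory is restored when the block exits, even if
    an exception is raised.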
    """
    cur_dir = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(cur_dir)


def create_job_quantum_graph_filename(config, job, out_prefix=None):
    """Create a filename to be used when storing the QuantumGraph
    for a job.

    Parameters
    ----------
    config : `lsst.ctrl.bps.BpsConfig`
        BPS configuration.
    job : `lsst.ctrl.bps.GenericWorkflowJob`
        Job for which the QuantumGraph file is being saved.
    out_prefix : `str`, optional
        Path prefix for the QuantumGraph filename. If no out_prefix is given,
        uses current working directory.

    Returns
    -------
    full_filename : `str`
        The filename for the job's QuantumGraph.
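
    Examples
    --------
    A minimal sketch; ``config`` and ``job`` are assumed to come from an
    in-progress BPS submission, and the prefix shown is illustrative::

        filename = create_job_quantum_graph_filename(
            config, job, out_prefix="submit/u/someuser/run1"
        )
        # e.g., "submit/u/someuser/run1/inputs/<subdir>/quantum_<job name>.qgraph"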
    """
    curvals = dataclasses.asdict(job)
    if job.tags:
        curvals.update(job.tags)
    found, subdir = config.search("subDirTemplate", opt={"curvals": curvals})
    if not found:
        subdir = "{job.label}"
    full_filename = Path("inputs") / subdir / f"quantum_{job.name}.qgraph"

    if out_prefix is not None:
        full_filename = Path(out_prefix) / full_filename

    return str(full_filename)


def save_qg_subgraph(qgraph, out_filename, node_ids=None):
    """Save subgraph to file.

    Parameters
    ----------
    qgraph : `lsst.pipe.base.QuantumGraph`
        QuantumGraph to save.
    out_filename : `str`
        Name of the output file.
    node_ids : `list` [`lsst.pipe.base.NodeId`], optional
        NodeIds for the subgraph to save to file. If `None`, the entire
        QuantumGraph is saved.
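
    Examples
    --------
    A minimal sketch; ``qgraph`` is assumed to be an already loaded
    `lsst.pipe.base.QuantumGraph`, ``node_ids`` a subset of its node ids, and
    the output path is illustrative::

        save_qg_subgraph(qgraph, "inputs/label1/quantum_job1.qgraph", node_ids)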
    """
    if not os.path.exists(out_filename):
        _LOG.debug("Saving QuantumGraph with %d nodes to %s", len(qgraph), out_filename)
        if node_ids is None:
            qgraph.saveUri(out_filename)
        else:
            qgraph.subset(qgraph.getQuantumNodeByNodeId(nid) for nid in node_ids).saveUri(out_filename)
    else:
        _LOG.debug("Skipping saving QuantumGraph to %s because it already exists.", out_filename)


def _create_execution_butler(config, qgraph_filename, execution_butler_dir, out_prefix):
    """Create the execution butler for use by the compute jobs.

    Parameters
    ----------
    config : `lsst.ctrl.bps.BpsConfig`
        BPS configuration.
    qgraph_filename : `str`
        Run QuantumGraph filename.
    execution_butler_dir : `str`
        Directory in which to create the execution butler.
    out_prefix : `str` or `None`
        Path prefix for the file capturing the command's stdout and stderr.

    Raises
    ------
    OSError
        Raised if the file capturing the command's output cannot be created.
    RuntimeError
        Raised if the command to create the execution butler exits with a
        non-zero exit code.
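
    Examples
    --------
    A minimal sketch; the paths are illustrative and ``config`` is assumed to
    define an ``executionButler.createCommand`` entry::

        _create_execution_butler(
            config,
            "submit/u/someuser/run1/run1.qgraph",
            "submit/u/someuser/run1/EXEC_REPO-run1",
            "submit/u/someuser/run1",
        )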
    """
    _, command = config.search(
        ".executionButler.createCommand",
        opt={
            "curvals": {"executionButlerDir": execution_butler_dir, "qgraphFile": qgraph_filename},
            "replaceVars": True,
        },
    )
    out_filename = "execution_butler_creation.out"
    if out_prefix is not None:
        out_filename = os.path.join(out_prefix, out_filename)

    # When creating the execution Butler, handle separately errors related
    # to creating the log file and errors directly related to creating
    # the execution Butler itself.
    opening = "cannot create the execution Butler"
    try:
        with open(out_filename, "w", encoding="utf-8") as fh:
            print(command, file=fh)
            print("\n", file=fh)  # Note: want a blank line
            subprocess.run(shlex.split(command), shell=False, check=True, stdout=fh, stderr=subprocess.STDOUT)
    except OSError as exc:
        raise type(exc)(f"{opening}: {exc.strerror}") from None
    except subprocess.SubprocessError as exc:
        raise RuntimeError(f"{opening}, see '{out_filename}' for details") from exc


def create_count_summary(counts):
    """Create summary from count mapping.

    Parameters
    ----------
    counts : `collections.Counter` or `dict` [`str`, `int`]
        Mapping of keys to counts.

    Returns
    -------
    summary : `str`
        Semi-colon delimited string of key:count pairs
        (e.g., "key1:cnt1;key2:cnt2"). Parsable by
        parse_count_summary().
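
    Examples
    --------
    A doctest-style sketch; the labels and counts are illustrative:

    >>> from collections import Counter
    >>> create_count_summary(Counter({"visit": 10, "calib": 2}))
    'visit:10;calib:2'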
    """
    summary = ""
    if isinstance(counts, dict):
        summary = ";".join([f"{key}:{counts[key]}" for key in counts])
    return summary


def parse_count_summary(summary):
    """Parse summary into count mapping.

    Parameters
    ----------
    summary : `str`
        Semi-colon delimited string of key:count pairs.

    Returns
    -------
    counts : `collections.Counter`
        Mapping representation of given summary for easier
        individual count lookup.
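
    Examples
    --------
    A doctest-style sketch; the labels and counts are illustrative:

    >>> parse_count_summary("visit:10;calib:2")
    Counter({'visit': 10, 'calib': 2})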
    """
    counts = Counter()
    for part in summary.split(";"):
        label, count = part.split(":")
        # Store counts as integers so they behave like normal Counter values.
        counts[label] = int(count)
    return counts


def _dump_pkg_info(filename):
    """Save information about versions of packages in use for future reference.

    Parameters
    ----------
    filename : `str`
        The name of the file in which to save the package version information.
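
    Examples
    --------
    A minimal sketch; the prefix is illustrative and a ``.yaml`` suffix is
    added automatically when missing::

        _dump_pkg_info("submit/u/someuser/run1/pkg_info")
        # writes submit/u/someuser/run1/pkg_info.yaml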
    """
    file = Path(filename)
    if file.suffix.lower() not in {".yaml", ".yml"}:
        file = file.with_suffix(f"{file.suffix}.yaml")
    packages = Packages.fromSystem()
    packages.write(str(file))


def _dump_env_info(filename):
    """Save information about the runtime environment for future reference.

    Parameters
    ----------
    filename : `str`
        The name of the file in which to save the runtime environment
        information.
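
    Examples
    --------
    A minimal sketch; the prefix is illustrative and a ``.yaml`` suffix is
    added automatically when missing::

        _dump_env_info("submit/u/someuser/run1/env_info")
        # writes submit/u/someuser/run1/env_info.yaml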
    """
    file = Path(filename)
    if file.suffix.lower() not in {".yaml", ".yml"}:
        file = file.with_suffix(f"{file.suffix}.yaml")
    with open(file, "w", encoding="utf-8") as fh:
        yaml.dump(dict(os.environ), fh)