Coverage for python/lsst/verify/bin/dispatchverify.py : 12%

# This file is part of verify.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
21"""Upload LSST Science Pipelines Verification `~lsst.verify.Job` datasets to
22the SQUASH dashboard.
24Job JSON files can be created by `lsst.verify.Job.write` or
25`lsst.verify.output_quantities`. A `~lsst.verify.Job` dataset consists of
26metric measurements, associated blobs, and pipeline execution metadata.
27Individual LSST Science Pipelines tasks typically write separate JSON datasets.
28This command can collect and combine multiple Job JSON datasets into a single
29Job upload.
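
A minimal sketch of producing such a dataset from Python (the metadata key and
output filename are illustrative; measurements are normally added by pipeline
tasks)::

    from lsst.verify import Job

    job = Job()
    job.meta['test_dataset'] = 'example'  # hypothetical metadata key
    job.write('task_output.verify.json')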

**Configuration**

dispatch_verify.py is configurable from both the command line and environment
variables. See the argument documentation for environment variable equivalents.
Command line settings override environment variable configuration.
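
For example, a typical Jenkins invocation might look like this (the JSON paths
are placeholders; the password is read from ``$SQUASH_PASSWORD`` or prompted
for)::

    dispatch_verify.py --env=jenkins --lsstsw $LSSTSW \
        --user "$SQUASH_USER" verify_output/*.json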

**Metadata and environment**

dispatch_verify.py can enrich Verification Job metadata with information
from the environment. Currently dispatch_verify.py supports the Jenkins CI
and the LSST Data Facility (LDF) execution environments.

In the Jenkins CI execution environment (``--env=jenkins``) the
following environment variables are consumed:

- ``BUILD_ID``: ID in the CI system
- ``BUILD_URL``: CI page with information about the build
- ``PRODUCT``: the name of the product built, e.g. 'validate_drp'
- ``dataset``: the name of the dataset processed, e.g. 'validation_data_cfht'
- ``label``: the name of the platform where it runs

If ``--lsstsw`` is used, additional Git branch information is included with
Science Pipelines package metadata.

In the LSST Data Facility execution environment (``--env=ldf``) the following
environment variables are consumed:

- ``DATASET``: the name of the dataset processed, e.g. 'HSC RC2'
- ``DATASET_REPO_URL``: a reference URL with information about the dataset
- ``RUN_ID``: ID of the run in the LDF environment
- ``RUN_ID_URL``: a reference URL with information about the run
- ``VERSION_TAG``: the version tag of the LSST software used, e.g. 'w_2018_18'

Note: it is currently not possible to gather Science Pipelines package metadata
in the LDF environment, so when ``--env=ldf`` is used, ``--ignore-lsstsw`` is
also applied by default.
"""
# For determining what is documented in Sphinx
__all__ = ['build_argparser', 'main', 'insert_lsstsw_metadata',
           'insert_extra_package_metadata', 'insert_env_metadata',
           'Configuration']

import argparse
import os
import json
import getpass

try:
    import git
except ImportError:
    # GitPython is not a standard Stack package; skip gracefully if unavailable
    git = None

import lsst.log
from lsst.verify import Job
from lsst.verify.metadata.lsstsw import LsstswRepos
from lsst.verify.metadata.eupsmanifest import Manifest
from lsst.verify.metadata.jenkinsci import get_jenkins_env
from lsst.verify.metadata.ldf import get_ldf_env


def build_argparser():
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog='More information is available at https://pipelines.lsst.io.')

    parser.add_argument(
        'json_paths',
        nargs='+',
        metavar='json',
        help='Verification job JSON file, or files. When multiple JSON '
             'files are present, their measurements, blobs, and metadata '
             'are merged.')
    parser.add_argument(
        '--test',
        default=False,
        action='store_true',
        help='Run this command without uploading to the SQUASH service. '
             'The JSON payload is printed to standard out.')
    parser.add_argument(
        '--write',
        metavar='PATH',
        dest='output_filepath',
        help='Write the merged and enriched Job JSON dataset to the given '
             'path.')
    parser.add_argument(
        '--show',
        dest='show_json',
        action='store_true',
        default=False,
        help='Print the assembled Job JSON to standard output.')
    parser.add_argument(
        '--ignore-blobs',
        dest='ignore_blobs',
        action='store_true',
        default=False,
        help='Ignore data blobs even if they are available in the '
             'verification job.')

    env_group = parser.add_argument_group('Environment arguments')
    env_group.add_argument(
        '--env',
        dest='env_name',
        choices=Configuration.allowed_env,
        help='Name of the environment where the verification job is being '
             'run. In some environments dispatch_verify.py will gather '
             'additional metadata automatically:\n'
             '\n'
             'jenkins\n'
             '    For the Jenkins CI (https://ci.lsst.codes)'
             ' environment.\n'
             'ldf\n'
             '    For the LSST Data Facility environment.\n'
             '\n'
             'Equivalent to the $VERIFY_ENV environment variable.')
    env_group.add_argument(
        '--lsstsw',
        dest='lsstsw',
        metavar='PATH',
        help='lsstsw directory path. If available, Stack package versions '
             'are read from lsstsw. Equivalent to the ``$LSSTSW`` '
             'environment variable. Disabled with ``--ignore-lsstsw``.')
    env_group.add_argument(
        '--package-repos',
        dest='extra_package_paths',
        nargs='*',
        metavar='PATH',
        help='Paths to additional Stack package Git repositories. These '
             'packages are tracked in Job metadata, like lsstsw-based '
             'packages.')
    env_group.add_argument(
        '--ignore-lsstsw',
        dest='ignore_lsstsw',
        action='store_true',
        default=False,
        help='Ignore lsstsw metadata even if it is available (for example, '
             'the ``$LSSTSW`` variable is set).')

    api_group = parser.add_argument_group('SQUASH API arguments')
    api_group.add_argument(
        '--url',
        dest='api_url',
        metavar='URL',
        help='Root URL of the SQUASH API. Equivalent to the ``$SQUASH_URL`` '
             'environment variable.')
    api_group.add_argument(
        '--user',
        dest='api_user',
        metavar='USER',
        help='Username for SQUASH API. Equivalent to the ``$SQUASH_USER`` '
             'environment variable.')
    api_group.add_argument(
        '--password',
        dest='api_password',
        metavar='PASSWORD',
        help='Password for SQUASH API. Equivalent to the ``$SQUASH_PASSWORD`` '
             'environment variable. If neither is set, you will be prompted.')
    return parser


def main():
    """Entrypoint for the ``dispatch_verify.py`` command line executable.
    """
    log = lsst.log.Log.getLogger('verify.bin.dispatchverify.main')

    parser = build_argparser()
    args = parser.parse_args()
    config = Configuration(args)
    log.debug(str(config))

    # Parse all Job JSON
    jobs = []
    for json_path in config.json_paths:
        log.info('Loading {0}'.format(json_path))
        with open(json_path) as fp:
            json_data = json.load(fp)
        # Ignore blobs from the verification jobs
        if config.ignore_blobs:
            log.info('Ignoring blobs from Job JSON {0}'.format(json_path))
            json_data = delete_blobs(json_data)
        job = Job.deserialize(**json_data)
        jobs.append(job)

    # Merge all Jobs into one
    job = jobs.pop(0)
    if len(jobs) > 0:
        log.info('Merging verification Job JSON.')
        for other_job in jobs:
            job += other_job

    # Ensure all measurements have a metric so that units are normalized
    log.info('Refreshing metric definitions from verify_metrics')
    job.reload_metrics_package('verify_metrics')

    # Insert package metadata from lsstsw
    if not config.ignore_lsstsw:
        log.info('Inserting lsstsw package metadata from '
                 '{0}.'.format(config.lsstsw))
        job = insert_lsstsw_metadata(job, config)

    # Insert metadata from additional specified packages
    if config.extra_package_paths is not None:
        job = insert_extra_package_metadata(job, config)

    # Add environment variable metadata from the Jenkins CI environment
    if config.env_name == 'jenkins':
        log.info('Inserting Jenkins CI environment metadata.')
        jenkins_metadata = get_jenkins_env()
        job = insert_env_metadata(job, 'jenkins', jenkins_metadata)
    elif config.env_name == 'ldf':
        log.info('Inserting LSST Data Facility environment metadata.')
        ldf_metadata = get_ldf_env()
        job = insert_env_metadata(job, 'ldf', ldf_metadata)

    # Upload job
    if not config.test:
        log.info('Uploading Job JSON to {0}.'.format(config.api_url))
        job.dispatch(api_user=config.api_user,
                     api_password=config.api_password,
                     api_url=config.api_url)

    if config.show_json:
        print(json.dumps(job.json,
                         sort_keys=True, indent=4, separators=(',', ': ')))

    # Write a json file
    if config.output_filepath is not None:
        log.info('Writing Job JSON to {0}.'.format(config.output_filepath))
        job.write(config.output_filepath)


def delete_blobs(json_data):
    """Delete data blobs from the Job JSON.
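
    For example, a minimal sketch of the expected behavior:

    >>> delete_blobs({'blobs': {}, 'measurements': []})
    {'measurements': []}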
    """
    if 'blobs' in json_data:
        del json_data['blobs']
    return json_data


def insert_lsstsw_metadata(job, config):
    """Insert metadata for lsstsw-based packages into ``Job.meta['packages']``.
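
    Each entry in ``Job.meta['packages']`` is keyed by package name and, as
    built below, has this shape (the values shown are illustrative)::

        {'name': 'afw',
         'git_branch': 'main',
         'git_url': 'https://github.com/lsst/afw.git',
         'git_sha': '1a2b3c4d...',
         'eups_version': 'g0123456789+1'}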
    """
    lsstsw_repos = LsstswRepos(config.lsstsw)

    with open(lsstsw_repos.manifest_path) as fp:
        manifest = Manifest(fp)

    packages = {}
    for package_name, manifest_item in manifest.items():
        package_doc = {
            'name': package_name,
            'git_branch': lsstsw_repos.get_package_branch(package_name),
            'git_url': lsstsw_repos.get_package_repo_url(package_name),
            'git_sha': manifest_item.git_sha,
            'eups_version': manifest_item.version
        }
        packages[package_name] = package_doc

    if 'packages' in job.meta:
        # Extend packages entry
        job.meta['packages'].update(packages)
    else:
        # Create new packages entry
        job.meta['packages'] = packages
    return job


def insert_extra_package_metadata(job, config):
    """Insert metadata for extra packages (``--package-repos``) into
    ``Job.meta['packages']``.
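
    If GitPython is importable, the branch name, commit SHA, and origin URL
    are read from each repository; otherwise only the package name is
    recorded.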
    """
    log = lsst.log.Log.getLogger(
        'verify.bin.dispatchverify.insert_extra_package_metadata')

    if 'packages' not in job.meta:
        job.meta['packages'] = dict()

    for package_path in config.extra_package_paths:
        log.info('Inserting extra package metadata: {0}'.format(package_path))
        package_name = package_path.split(os.sep)[-1]

        package = {'name': package_name}

        if git is not None:
            git_repo = git.Repo(package_path)
            package['git_sha'] = git_repo.active_branch.commit.hexsha
            package['git_branch'] = git_repo.active_branch.name
            package['git_url'] = git_repo.remotes.origin.url

        if package_name in job.meta['packages']:
            # Update pre-existing package metadata
            job.meta['packages'][package_name].update(package)
        else:
            # Create new package metadata
            job.meta['packages'][package_name] = package

    return job


def insert_env_metadata(job, env_name, metadata):
    """Insert environment metadata into the Job.
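
    For example (a sketch with an illustrative metadata dict), calling
    ``insert_env_metadata(job, 'jenkins', {'ci_id': '42'})`` sets
    ``job.meta['env']`` to ``{'ci_id': '42', 'env_name': 'jenkins'}``.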
    """
    metadata.update({'env_name': env_name})
    job.meta['env'] = metadata

    return job


class Configuration(object):
    """Configuration for dispatch_verify.py that reconciles command line and
    environment variable arguments.

    Configuration is validated for completeness and certain errors.

    Parameters
    ----------
    args : `argparse.Namespace`
        Parsed command line arguments, produced by `parse_args`.
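
    Examples
    --------
    A minimal sketch of constructing a Configuration (the JSON path is a
    placeholder)::

        parser = build_argparser()
        config = Configuration(
            parser.parse_args(['Job.json', '--test', '--ignore-lsstsw']))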
    """

    allowed_env = ('jenkins', 'ldf')

    def __init__(self, args):
        self.json_paths = args.json_paths

        self.test = args.test

        self.output_filepath = args.output_filepath

        self.show_json = args.show_json

        self.env_name = args.env_name or os.getenv('VERIFY_ENV')
        if self.env_name is not None and self.env_name not in self.allowed_env:
            message = '$VERIFY_ENV not one of {0!s}'.format(self.allowed_env)
            raise RuntimeError(message)

        self.ignore_blobs = args.ignore_blobs

        self.ignore_lsstsw = args.ignore_lsstsw

        # Make sure --ignore-lsstsw is used in the LDF environment
        if self.env_name == 'ldf':
            self.ignore_lsstsw = True

        self.lsstsw = args.lsstsw or os.getenv('LSSTSW')
        if self.lsstsw is not None:
            self.lsstsw = os.path.abspath(self.lsstsw)
        if not self.ignore_lsstsw and not self.lsstsw:
            message = 'lsstsw directory not found at {0}'.format(self.lsstsw)
            raise RuntimeError(message)

        if args.extra_package_paths is not None:
            self.extra_package_paths = [os.path.abspath(p)
                                        for p in args.extra_package_paths]
        else:
            self.extra_package_paths = []
        for path in self.extra_package_paths:
            if not os.path.isdir(path):
                message = 'Package directory not found: {0}'.format(path)
                raise RuntimeError(message)

        default_url = 'https://squash.lsst.codes/dashboard/api'
        self.api_url = args.api_url or os.getenv('SQUASH_URL', default_url)

        self.api_user = args.api_user or os.getenv('SQUASH_USER')
        if not self.test and self.api_user is None:
            message = '--user or $SQUASH_USER configuration required'
            raise RuntimeError(message)

        self.api_password = (args.api_password or
                             os.getenv('SQUASH_PASSWORD'))
        if not self.test and self.api_password is None:
            # If password hasn't been set, prompt for it.
            self.api_password = getpass.getpass(prompt="SQuaSH password: ")

    def __str__(self):
        configs = {
            'json_paths': self.json_paths,
            'test': self.test,
            'output_filepath': self.output_filepath,
            'show_json': self.show_json,
            'ignore_blobs': self.ignore_blobs,
            'env': self.env_name,
            'ignore_lsstsw': self.ignore_lsstsw,
            'lsstsw': self.lsstsw,
            'extra_package_paths': self.extra_package_paths,
            'api_url': self.api_url,
            'api_user': self.api_user,
        }
        if self.api_password is None:
            configs['api_password'] = None
        else:
            configs['api_password'] = '*' * len(self.api_password)

        return json.dumps(configs,
                          sort_keys=True, indent=4, separators=(',', ': '))