Coverage for bin.src/export-results.py: 87%

#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
import re
import sys
import numpy as np
import lsst.daf.persistence as dafPersist
import lsst.log

# Route all lsst.log output to stderr so that stdout carries only the exported table.
lsst.log.configure_prop("""
log4j.rootLogger=INFO, A1
log4j.appender.A1=ConsoleAppender
log4j.appender.A1.Target=System.err
log4j.appender.A1.layout=PatternLayout
""")

if len(sys.argv) != 2:  # coverage: this condition was never true, so the usage branch was not exercised
    print("Usage: export-results <output_directory>", file=sys.stderr)
    sys.exit(1)
outputdir = sys.argv[1]
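
# A typical invocation might look like the following (the repository path and the
# redirect target are illustrative, not part of the script):
#
#     python bin.src/export-results.py /path/to/processed/repo > sources.txt
#
# <output_directory> must be a Butler data repository containing "src" catalogs for
# the dataIds listed below; the exported table is written to stdout.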

# Load sources and print interesting columns

cols = ("id",
        "coord_ra",
        "coord_dec",
        "flags_negative",
        "base_SdssCentroid_flag",
        "base_PixelFlags_flag_edge",
        "base_PixelFlags_flag_interpolated",
        "base_PixelFlags_flag_interpolatedCenter",
        "base_PixelFlags_flag_saturated",
        "base_PixelFlags_flag_saturatedCenter",
        "base_SdssCentroid_x",
        "base_SdssCentroid_y",
        "base_SdssCentroid_xErr",
        "base_SdssCentroid_yErr",
        "base_SdssShape_xx",
        "base_SdssShape_xy",
        "base_SdssShape_yy",
        "base_SdssShape_xxErr",
        "base_SdssShape_xyErr",
        "base_SdssShape_yyErr",
        "base_SdssShape_flag",
        "base_GaussianFlux_instFlux",
        "base_GaussianFlux_instFluxErr",
        "base_PsfFlux_instFlux",
        "base_PsfFlux_instFluxErr",
        "base_CircularApertureFlux_6_0_instFlux",
        "base_CircularApertureFlux_6_0_instFluxErr",
        "base_ClassificationExtendedness_value",
        )

headerPrinted = False
butler = dafPersist.Butler(outputdir)
for filter in "ugriz":
    # Two SDSS (run, field) combinations per band; skip any dataId that was not processed.
    for dataId in (dict(run=4192, filter=filter, field=300, camcol=4),
                   dict(run=6377, filter=filter, field=399, camcol=4),
                   ):
        if not butler.datasetExists("src", **dataId):
            continue

        srcs = butler.get("src", **dataId)
        if not headerPrinted:
            print('#' + ' '.join(cols))
            headerPrinted = True
        vecs = []
        for col in cols:
            if col not in srcs.schema:
                # If the column is not in the source table, we fill it
                # with a "-". We can therefore check optional columns
                # like ``flags_negative``.
                v = ["-"] * len(srcs)
            elif col.endswith(".ra") or col.endswith(".dec") or col.endswith("_ra") or col.endswith("_dec"):
                # Coordinates are stored in radians; export them in degrees.
                v = np.rad2deg(srcs.get(col))
            elif re.search(r"\.err\.(xx|yy|xy)$", col):
                # Dotted error columns (e.g. "foo.err.xx"): look up the covariance-matrix
                # key for the error field and pull out the requested element.
                # (coverage: this branch was never taken, since every column above uses
                # the "_" naming convention)
                field, which = re.search(r"^(.*\.err)\.(xx|yy|xy)$", col).groups()
                key = srcs.schema.find(field).key
                key = key[0, 0] if which == "xx" else key[1, 1] if which == "yy" else key[0, 1]
                v = srcs.get(key)
            else:
                v = srcs.get(col)
            v = np.asarray(v)
            vecs.append(v)

        for vals in zip(*vecs):
            # To future-proof the comparison, we use an explicit format for floating-point
            # types, since a default format could be ambiguous.
            print(' '.join(['{0:.12g}'.format(el) if issubclass(el.dtype.type, np.floating)
                            else str(el) for el in vals]))
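
The explicit "{0:.12g}" format pins floating-point columns to twelve significant digits,
which keeps the exported table stable regardless of how the running Python would print a
bare float (Python 2's str() and Python 3's str() emit different digit counts for many
values). A quick sketch of the resulting output, using an arbitrary value:

    >>> '{0:.12g}'.format(1.0 / 3.0)
    '0.333333333333'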