Coverage for tests/testCalibrationMetrics.py : 13%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1from builtins import zip
2import matplotlib
3matplotlib.use("Agg")
4import numpy as np
5import unittest
6import lsst.sims.maf.metrics as metrics
7import lsst.sims.maf.stackers as stackers
8import lsst.utils.tests
9from builtins import str
class TestCalibrationMetrics(unittest.TestCase):
    """Tests for the astrometric calibration metrics (parallax, proper
    motion, parallax coverage, parallax-DCR degeneracy, radius of
    observations).

    The simulated visit data are built with numpy structured arrays and run
    through the ParallaxFactorStacker where the metric needs the parallax
    amplitude columns.
    """

    @staticmethod
    def _makeAstrometryData(nvisits=700):
        """Build a simulated visit array for the parallax/proper-motion tests.

        Parameters
        ----------
        nvisits : int, optional
            Number of visits to simulate (must be >= 200 so each of the
            r/u/g filter slices below is populated).

        Returns
        -------
        numpy.ndarray
            Structured array with nightly MJDs, constant seeing (0.7) and
            depth (24.), an r/u/g filter pattern, and the parallax factor
            columns added by ParallaxFactorStacker.
        """
        names = ['observationStartMJD', 'finSeeing', 'fiveSigmaDepth',
                 'fieldRA', 'fieldDec', 'filter']
        types = [float, float, float, float, float, (np.str_, 1)]
        data = np.zeros(nvisits, dtype=list(zip(names, types)))
        data['observationStartMJD'] = np.arange(nvisits) + 56762
        data['finSeeing'] = 0.7
        data['filter'][0:100] = str('r')
        data['filter'][100:200] = str('u')
        data['filter'][200:] = str('g')
        data['fiveSigmaDepth'] = 24.
        return stackers.ParallaxFactorStacker().run(data)

    def testParallaxMetric(self):
        """
        Test the parallax metric.
        """
        data = self._makeAstrometryData()
        slicePoint = {'sid': 0}
        for flag in [False, True]:
            # Reset the columns degraded during the previous iteration.
            data['finSeeing'] = 0.7
            data['fiveSigmaDepth'] = 24.
            baseline = metrics.ParallaxMetric(normalize=flag,
                                              seeingCol='finSeeing').run(data, slicePoint)
            data['finSeeing'] = data['finSeeing'] + .3
            worse1 = metrics.ParallaxMetric(normalize=flag,
                                            seeingCol='finSeeing').run(data, slicePoint)
            worse2 = metrics.ParallaxMetric(normalize=flag, rmag=22.,
                                            seeingCol='finSeeing').run(data, slicePoint)
            worse3 = metrics.ParallaxMetric(normalize=flag, rmag=22.,
                                            seeingCol='finSeeing').run(data[0:300], slicePoint)
            data['fiveSigmaDepth'] = data['fiveSigmaDepth'] - 1.
            worse4 = metrics.ParallaxMetric(normalize=flag, rmag=22.,
                                            seeingCol='finSeeing').run(data[0:300], slicePoint)
            # Make sure the RMS increases as seeing increases, the star gets
            # fainter, the background gets brighter, or the baseline decreases.
            # No ordering is asserted for the normalized metric (flag=True);
            # only the un-normalized values are checked here.
            if not flag:
                self.assertGreater(worse1, baseline)
                self.assertGreater(worse2, worse1)
                self.assertGreater(worse3, worse2)
                self.assertGreater(worse4, worse3)

    def testProperMotionMetric(self):
        """
        Test the ProperMotion metric.
        """
        data = self._makeAstrometryData()
        # Use the same slicePoint form as testParallaxMetric (was [0]).
        slicePoint = {'sid': 0}
        for flag in [False, True]:
            # Reset the columns degraded during the previous iteration.
            data['finSeeing'] = 0.7
            data['fiveSigmaDepth'] = 24.
            baseline = metrics.ProperMotionMetric(normalize=flag,
                                                  seeingCol='finSeeing').run(data, slicePoint)
            data['finSeeing'] = data['finSeeing'] + .3
            worse1 = metrics.ProperMotionMetric(normalize=flag,
                                                seeingCol='finSeeing').run(data, slicePoint)
            worse2 = metrics.ProperMotionMetric(normalize=flag, rmag=22.,
                                                seeingCol='finSeeing').run(data, slicePoint)
            worse3 = metrics.ProperMotionMetric(normalize=flag, rmag=22.,
                                                seeingCol='finSeeing').run(data[0:300], slicePoint)
            data['fiveSigmaDepth'] = data['fiveSigmaDepth'] - 1.
            worse4 = metrics.ProperMotionMetric(normalize=flag, rmag=22.,
                                                seeingCol='finSeeing').run(data[0:300], slicePoint)
            # Make sure the RMS increases as seeing increases, the star gets
            # fainter, the background gets brighter, or the baseline decreases.
            if flag:
                # When normalized, mag of star and m5 don't matter (just scheduling).
                self.assertAlmostEqual(worse2, worse1)
                self.assertAlmostEqual(worse4, worse3)
                # But using fewer points should make proper motion worse;
                # survey assumed to have same seeing and limiting mags.
                self.assertLess(worse3, worse2)
            else:
                self.assertGreater(worse1, baseline)
                self.assertGreater(worse2, worse1)
                self.assertGreater(worse3, worse2)
                self.assertGreater(worse4, worse3)

    def testParallaxCoverageMetric(self):
        """
        Test the parallax coverage metric.
        """
        names = ['observationStartMJD', 'finSeeing', 'fiveSigmaDepth',
                 'fieldRA', 'fieldDec', 'filter', 'ra_pi_amp', 'dec_pi_amp']
        types = [float, float, float, float, float, '<U1', float, float]
        data = np.zeros(100, dtype=list(zip(names, types)))
        data['filter'] = 'r'
        data['fiveSigmaDepth'] = 25.
        data['ra_pi_amp'] = 1.
        data['dec_pi_amp'] = 1.

        # All the parallax amplitudes are the same, should return zero.
        metric = metrics.ParallaxCoverageMetric(seeingCol='finSeeing')
        val = metric.run(data)
        self.assertEqual(val, 0)

        # Half at (1, 1), half at (-1, -1).
        data['ra_pi_amp'][0:50] = 1
        data['dec_pi_amp'][0:50] = 1
        data['ra_pi_amp'][50:] = -1
        data['dec_pi_amp'][50:] = -1
        val = metric.run(data)
        self.assertAlmostEqual(val, 2.**0.5)

        # Same geometry at half the amplitude: half the coverage value.
        data['ra_pi_amp'][0:50] = 0.5
        data['dec_pi_amp'][0:50] = 0.5
        data['ra_pi_amp'][50:] = -0.5
        data['dec_pi_amp'][50:] = -0.5
        val = metric.run(data)
        self.assertAlmostEqual(val, 0.5 * 2**0.5)

        # Amplitude only along RA.
        data['ra_pi_amp'][0:50] = 1
        data['dec_pi_amp'][0:50] = 0
        data['ra_pi_amp'][50:] = -1
        data['dec_pi_amp'][50:] = 0
        val = metric.run(data)
        self.assertEqual(val, 1)

    def testParallaxDcrDegenMetric(self):
        """
        Test the parallax-DCR degeneracy metric.
        """
        names = ['observationStartMJD', 'finSeeing', 'fiveSigmaDepth',
                 'fieldRA', 'fieldDec', 'filter',
                 'ra_pi_amp', 'dec_pi_amp', 'ra_dcr_amp', 'dec_dcr_amp']
        types = [float, float, float, float, float, '<U1', float,
                 float, float, float]
        data = np.zeros(100, dtype=list(zip(names, types)))
        data['filter'] = 'r'
        data['fiveSigmaDepth'] = 25.

        # Set so ra is perfectly correlated.
        data['ra_pi_amp'] = 1.
        data['dec_pi_amp'] = 0.01
        data['ra_dcr_amp'] = 0.2
        metric = metrics.ParallaxDcrDegenMetric(seeingCol='finSeeing')
        val = metric.run(data)
        np.testing.assert_almost_equal(np.abs(val), 1., decimal=2)

        # Set so the offsets are always nearly perpendicular.
        data['ra_pi_amp'] = 0.001
        data['dec_pi_amp'] = 1.
        data['ra_dcr_amp'] = 0.2
        metric = metrics.ParallaxDcrDegenMetric(seeingCol='finSeeing')
        val = metric.run(data)
        np.testing.assert_almost_equal(val, 0., decimal=2)

        # Generate a seeded random distribution that should have little or
        # no correlation.
        rng = np.random.RandomState(42)
        data['ra_pi_amp'] = rng.rand(100) * 2 - 1.
        data['dec_pi_amp'] = rng.rand(100) * 2 - 1.
        data['ra_dcr_amp'] = rng.rand(100) * 2 - 1.
        data['dec_dcr_amp'] = rng.rand(100) * 2 - 1.
        val = metric.run(data)
        self.assertLess(np.abs(val), 0.2)

    def testRadiusObsMetric(self):
        """
        Test the RadiusObsMetric.
        """
        names = ['fieldRA', 'fieldDec']
        dt = ['float'] * 2
        data = np.zeros(3, dtype=list(zip(names, dt)))
        data['fieldDec'] = [-.1, 0, .1]
        slicePoint = {'ra': 0., 'dec': 0.}
        metric = metrics.RadiusObsMetric()
        result = metric.run(data, slicePoint)
        # With the slice point at the origin and fieldRA = 0, the distance
        # to each pointing reduces to |fieldDec|.
        for i, r in enumerate(result):
            np.testing.assert_almost_equal(r, abs(data['fieldDec'][i]))
        self.assertEqual(metric.reduceMean(result), np.mean(result))
        self.assertEqual(metric.reduceRMS(result), np.std(result))
        np.testing.assert_almost_equal(
            metric.reduceFullRange(result),
            np.max(np.abs(data['fieldDec'])) - np.min(np.abs(data['fieldDec'])))
class TestMemory(lsst.utils.tests.MemoryTestCase):
    """Hook in the checks provided by lsst.utils.tests.MemoryTestCase;
    this module adds no extra tests of its own."""
    pass
def setup_module(module):
    """Module-level setup hook: initialize the lsst.utils test framework."""
    lsst.utils.tests.init()
if __name__ == "__main__":
    # Initialize the LSST test framework, then hand off to unittest's runner.
    lsst.utils.tests.init()
    unittest.main()