Coverage for tests/test_dcrAssembleCoadd.py: 23%
80 statements
« prev ^ index » next coverage.py v6.4.4, created at 2022-08-20 09:51 +0000
# This file is part of pipe_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
22import unittest
24import lsst.utils.tests
26from lsst.pipe.tasks.dcrAssembleCoadd import DcrAssembleCoaddTask, DcrAssembleCoaddConfig
class DcrAssembleCoaddCalculateGainTestCase(lsst.utils.tests.TestCase):
    """Tests of DcrAssembleCoaddTask.calculateGain().

    ``calculateGain`` takes a list of measured convergence values and a
    list of the gains used so far; it returns the next gain to use and
    appends it to the gain list in place.
    """

    def setUp(self):
        """Build a convergence history that matches a perfectly-converging
        model, plus a configured task to exercise.
        """
        self.baseGain = 0.5
        self.gainList = [self.baseGain, self.baseGain]
        self.convergenceList = [0.2]
        # Calculate the convergence we would expect if the model was
        # converging perfectly, so that the improvement is limited only by
        # our conservative gain.
        for i in range(2):
            self.convergenceList.append(self.convergenceList[i]/(self.baseGain + 1))
        # The gain expected from a perfect fit with the progressive scheme.
        self.nextGain = (1 + self.baseGain) / 2

        self.config = DcrAssembleCoaddConfig()
        self.config.effectiveWavelength = 500.0
        self.config.bandwidth = 100.0
        # NOTE(review): several tests modify ``self.config`` after the task
        # is constructed; this relies on the task holding a reference to
        # (not a copy of) the config object — confirm against the task.
        self.task = DcrAssembleCoaddTask(config=self.config)

    def testUnbalancedLists(self):
        """Mismatched convergence/gain list lengths raise ValueError."""
        gainList = [1, 2, 3, 4]
        convergenceList = [1, 2]
        with self.assertRaises(ValueError):
            self.task.calculateGain(convergenceList, gainList)

    def testNoProgressiveGain(self):
        """With useProgressiveGain disabled, the configured baseGain is
        returned and appended to the gain list.
        """
        self.config.useProgressiveGain = False
        self.config.baseGain = self.baseGain
        expectGain = self.baseGain
        # Build the expectation before the call: calculateGain appends to
        # the gain list in place.
        expectGainList = self.gainList + [expectGain]
        result = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(self.gainList, expectGainList)

    def testBaseGainNone(self):
        """If baseGain is None, gain is calculated from the default values."""
        self.config.useProgressiveGain = False
        expectGain = 1 / (self.config.dcrNumSubfilters - 1)
        expectGainList = self.gainList + [expectGain]
        result = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(self.gainList, expectGainList)

    def testProgressiveFirstStep(self):
        """The first and second steps always return baseGain."""
        convergenceList = self.convergenceList[:1]
        gainList = []
        self.config.baseGain = self.baseGain
        expectGain = self.baseGain
        expectGainList = [expectGain]
        result = self.task.calculateGain(convergenceList, gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(gainList, expectGainList)

    def testProgressiveSecondStep(self):
        """The first and second steps always return baseGain."""
        convergenceList = self.convergenceList[:2]
        gainList = self.gainList[:1]
        self.config.baseGain = self.baseGain
        expectGain = self.baseGain
        expectGainList = gainList + [expectGain]
        result = self.task.calculateGain(convergenceList, gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(gainList, expectGainList)

    def testProgressiveGain(self):
        """Test that gain follows the "perfect" situation defined in setUp."""
        self.config.baseGain = self.baseGain
        expectGain = self.nextGain
        expectGainList = self.gainList + [expectGain]
        result = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertFloatsAlmostEqual(result, expectGain)
        self.assertEqual(self.gainList, expectGainList)

    def testProgressiveGainBadFit(self):
        """Test that gain is reduced if the predicted convergence does not
        match the measured convergence (in this case, converging too quickly).
        """
        wrongGain = 1.0
        gainList = [self.baseGain, self.baseGain]
        convergenceList = [0.2]
        for i in range(2):
            convergenceList.append(convergenceList[i]/(wrongGain + 1))
        # The below math is a simplified version of the full algorithm,
        # assuming the predicted convergence is zero.
        # Note that in this case, nextGain is smaller than wrongGain.
        nextGain = (self.baseGain + (1 + self.baseGain) / (1 + wrongGain)) / 2

        self.config.baseGain = self.baseGain
        expectGain = nextGain
        # Use the local gainList here (not self.gainList): it is the list
        # that calculateGain mutates and that is asserted below. Built
        # before the call, since calculateGain appends in place.
        expectGainList = gainList + [expectGain]
        result = self.task.calculateGain(convergenceList, gainList)
        self.assertFloatsAlmostEqual(result, nextGain)
        self.assertEqual(gainList, expectGainList)
def setup_module(module):
    """Pytest module-setup hook: initialize the LSST test utilities."""
    lsst.utils.tests.init()
class MatchMemoryTestCase(lsst.utils.tests.MemoryTestCase):
    """Run the checks inherited from lsst.utils.tests.MemoryTestCase;
    no additional tests are defined here.
    """
    pass
# Allow the test file to be run directly as a script.
if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()