Coverage for tests/test_dcr_assemble_coadd.py: 19%
80 statements
« prev ^ index » next coverage.py v7.3.3, created at 2023-12-16 15:07 +0000
# This file is part of drp_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import unittest

import lsst.utils.tests
from lsst.drp.tasks.dcr_assemble_coadd import DcrAssembleCoaddConfig, DcrAssembleCoaddTask
class DcrAssembleCoaddCalculateGainTestCase(lsst.utils.tests.TestCase):
    """Tests of dcrAssembleCoaddTask.calculateGain()."""

    def setUp(self):
        self.baseGain = 0.5
        self.gainList = [self.baseGain, self.baseGain]
        # Seed the convergence history, then extend it with the values we
        # would see if the model were converging perfectly, so that the
        # improvement is limited only by our conservative gain.
        self.convergenceList = [0.2]
        for step in (0, 1):
            self.convergenceList.append(self.convergenceList[step] / (self.baseGain + 1))
        self.nextGain = (1 + self.baseGain) / 2

        self.config = DcrAssembleCoaddConfig()
        self.config.effectiveWavelength = 500.0
        self.config.bandwidth = 100.0
        self.task = DcrAssembleCoaddTask(config=self.config)

    def testUnbalancedLists(self):
        """Histories of mismatched length must raise ValueError."""
        convergences = [1, 2]
        gains = [1, 2, 3, 4]
        with self.assertRaises(ValueError):
            self.task.calculateGain(convergences, gains)

    def testNoProgressiveGain(self):
        """With progressive gain disabled, baseGain is always returned."""
        self.config.useProgressiveGain = False
        self.config.baseGain = self.baseGain
        # Compute the expected history before the call, which appends to it.
        expectedHistory = self.gainList + [self.baseGain]
        gain = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertEqual(gain, self.baseGain)
        self.assertEqual(self.gainList, expectedHistory)

    def testBaseGainNone(self):
        """If baseGain is None, gain is calculated from the default values."""
        self.config.useProgressiveGain = False
        fallbackGain = 1 / (self.config.dcrNumSubfilters - 1)
        expectedHistory = self.gainList + [fallbackGain]
        gain = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertEqual(gain, fallbackGain)
        self.assertEqual(self.gainList, expectedHistory)

    def testProgressiveFirstStep(self):
        """The first and second steps always return baseGain."""
        self.config.baseGain = self.baseGain
        gains = []
        expectedHistory = [self.baseGain]
        gain = self.task.calculateGain(self.convergenceList[:1], gains)
        self.assertEqual(gain, self.baseGain)
        self.assertEqual(gains, expectedHistory)

    def testProgressiveSecondStep(self):
        """The first and second steps always return baseGain."""
        self.config.baseGain = self.baseGain
        gains = self.gainList[:1]
        expectedHistory = gains + [self.baseGain]
        gain = self.task.calculateGain(self.convergenceList[:2], gains)
        self.assertEqual(gain, self.baseGain)
        self.assertEqual(gains, expectedHistory)

    def testProgressiveGain(self):
        """Test that gain follows the "perfect" situation defined in setUp."""
        self.config.baseGain = self.baseGain
        expectedHistory = self.gainList + [self.nextGain]
        gain = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertFloatsAlmostEqual(gain, self.nextGain)
        self.assertEqual(self.gainList, expectedHistory)

    def testProgressiveGainBadFit(self):
        """Test that gain is reduced if the predicted convergence does not
        match the measured convergence (in this case, converging too quickly).
        """
        wrongGain = 1.0
        gains = [self.baseGain, self.baseGain]
        convergences = [0.2]
        for step in (0, 1):
            convergences.append(convergences[step] / (wrongGain + 1))
        # The below math is a simplified version of the full algorithm,
        # assuming the predicted convergence is zero.
        # Note that in this case, nextGain is smaller than wrongGain.
        nextGain = (self.baseGain + (1 + self.baseGain) / (1 + wrongGain)) / 2

        self.config.baseGain = self.baseGain
        expectedHistory = self.gainList + [nextGain]
        gain = self.task.calculateGain(convergences, gains)
        self.assertFloatsAlmostEqual(gain, nextGain)
        self.assertEqual(gains, expectedHistory)
def setup_module(module):
    """Pytest hook: initialize lsst.utils test machinery for this module."""
    lsst.utils.tests.init()
class MatchMemoryTestCase(lsst.utils.tests.MemoryTestCase):
    """Check for resource/memory leaks; all behavior is inherited from
    lsst.utils.tests.MemoryTestCase.
    """

    pass
# Allow running this test module directly rather than via pytest.
if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()