Coverage for tests/test_dcrAssembleCoadd.py: 23%

78 statements  

coverage.py v6.4.1, created at 2022-07-09 06:58 -0700

# This file is part of pipe_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import unittest

import lsst.utils.tests

from lsst.pipe.tasks.dcrAssembleCoadd import DcrAssembleCoaddTask, DcrAssembleCoaddConfig


class DcrAssembleCoaddCalculateGainTestCase(lsst.utils.tests.TestCase):
    """Tests of DcrAssembleCoaddTask.calculateGain()."""

    def setUp(self):
        self.baseGain = 0.5
        self.gainList = [self.baseGain, self.baseGain]
        self.convergenceList = [0.2]
        # Calculate the convergence we would expect if the model was converging perfectly,
        # so that the improvement is limited only by our conservative gain.
        for i in range(2):
            self.convergenceList.append(self.convergenceList[i]/(self.baseGain + 1))
        self.nextGain = (1 + self.baseGain) / 2
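        # nextGain encodes this test suite's expectation for the "perfect"
        # case: when the model converges exactly as predicted, the progressive
        # gain should settle at the average of baseGain and 1. This is a
        # simplification used only to build the expected values, not a
        # restatement of the task's full update rule.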

        self.config = DcrAssembleCoaddConfig()
        self.task = DcrAssembleCoaddTask(config=self.config)

    def testUnbalancedLists(self):
        gainList = [1, 2, 3, 4]
        convergenceList = [1, 2]
        with self.assertRaises(ValueError):
            self.task.calculateGain(convergenceList, gainList)

    def testNoProgressiveGain(self):
        self.config.useProgressiveGain = False
        self.config.baseGain = self.baseGain
        expectGain = self.baseGain
        expectGainList = self.gainList + [expectGain]
        result = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(self.gainList, expectGainList)

    def testBaseGainNone(self):
        """If baseGain is None, gain is calculated from the default values."""
        self.config.useProgressiveGain = False
        expectGain = 1 / (self.config.dcrNumSubfilters - 1)
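        # Assuming the config default of dcrNumSubfilters = 3, this works out
        # to 0.5; deriving it from the config keeps the test in step with the
        # default rather than hard-coding a value.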

        expectGainList = self.gainList + [expectGain]
        result = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(self.gainList, expectGainList)

    def testProgressiveFirstStep(self):
        """The first and second steps always return baseGain."""
        convergenceList = self.convergenceList[:1]
        gainList = []
        self.config.baseGain = self.baseGain
        expectGain = self.baseGain
        expectGainList = [expectGain]
        result = self.task.calculateGain(convergenceList, gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(gainList, expectGainList)

    def testProgressiveSecondStep(self):
        """The first and second steps always return baseGain."""
        convergenceList = self.convergenceList[:2]
        gainList = self.gainList[:1]
        self.config.baseGain = self.baseGain
        expectGain = self.baseGain
        expectGainList = gainList + [expectGain]
        result = self.task.calculateGain(convergenceList, gainList)
        self.assertEqual(result, expectGain)
        self.assertEqual(gainList, expectGainList)

    def testProgressiveGain(self):
        """Test that gain follows the "perfect" situation defined in setUp."""
        self.config.baseGain = self.baseGain
        expectGain = self.nextGain
        expectGainList = self.gainList + [expectGain]
        result = self.task.calculateGain(self.convergenceList, self.gainList)
        self.assertFloatsAlmostEqual(result, expectGain)
        self.assertEqual(self.gainList, expectGainList)

    def testProgressiveGainBadFit(self):
        """Test that gain is reduced if the predicted convergence does not
        match the measured convergence (in this case, converging too quickly).
        """
        wrongGain = 1.0
        gainList = [self.baseGain, self.baseGain]
        convergenceList = [0.2]
        for i in range(2):
            convergenceList.append(convergenceList[i]/(wrongGain + 1))
        # The math below is a simplified version of the full algorithm,
        # assuming the predicted convergence is zero.
        # Note that in this case, nextGain is smaller than wrongGain.
        nextGain = (self.baseGain + (1 + self.baseGain) / (1 + wrongGain)) / 2
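        # For these values: (0.5 + 1.5 / 2) / 2 = 0.625, which is indeed
        # smaller than wrongGain = 1.0.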

        self.config.baseGain = self.baseGain
        expectGain = nextGain
        expectGainList = self.gainList + [expectGain]
        result = self.task.calculateGain(convergenceList, gainList)
        self.assertFloatsAlmostEqual(result, nextGain)
        self.assertEqual(gainList, expectGainList)


def setup_module(module):
    lsst.utils.tests.init()


class MatchMemoryTestCase(lsst.utils.tests.MemoryTestCase):
    pass


if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()