Coverage for tests/test_commonMetrics.py : 29%

Hot-keys on this page
r m x p toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
# This file is part of verify.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import time
import unittest
import warnings

import astropy.units as u

import lsst.utils.tests
from lsst.pex.config import Config
import lsst.pipe.base.testUtils
from lsst.pipe.base import Task
from lsst.utils.timer import timeMethod

from lsst.verify import Measurement, Name
from lsst.verify.gen2tasks.testUtils import MetricTaskTestCase
from lsst.verify.tasks import MetricComputationError, TimingMetricTask, \
    MemoryMetricTask
from lsst.verify.tasks.testUtils import MetadataMetricTestCase
class DummyTask(Task):
    """A minimal Task whose ``run`` sleeps briefly, so that the
    timing/memory metadata recorded by `~lsst.utils.timer.timeMethod`
    has something measurable for the metric tasks under test.
    """
    ConfigClass = Config
    _DefaultName = "NotARealTask"
    # Nominal duration of one `run` call, in seconds; the timing tests
    # use this as the scale for the expected measurement.
    taskLength = 0.1

    @timeMethod
    def run(self):
        """Sleep for ``taskLength`` seconds, recording timing metadata."""
        time.sleep(self.taskLength)
class TimingMetricTestSuite(MetadataMetricTestCase):
    """Tests of `TimingMetricTask` run against metadata from `DummyTask`."""

    @classmethod
    def makeTask(cls):
        """Create the task under test (hook required by the base class)."""
        return TimingMetricTask(config=cls._standardConfig())

    @staticmethod
    def _standardConfig():
        """Return a config that times ``DummyTask.run`` as ``verify.DummyTime``."""
        config = TimingMetricTask.ConfigClass()
        config.connections.labelName = DummyTask._DefaultName
        config.target = DummyTask._DefaultName + ".run"
        config.connections.package = "verify"
        config.connections.metric = "DummyTime"
        return config

    def setUp(self):
        super().setUp()
        # Call through self for consistency with MemoryMetricTestSuite.setUp
        # (previously hard-coded as TimingMetricTestSuite._standardConfig()).
        self.config = self._standardConfig()
        self.metric = Name("verify.DummyTime")

        # Run the instrumented task once so its metadata contains timings.
        self.scienceTask = DummyTask()
        self.scienceTask.run()

    def testValid(self):
        """A normal run yields a positive timing below 2x the sleep length."""
        result = self.task.run(self.scienceTask.getFullMetadata())
        lsst.pipe.base.testUtils.assertValidOutput(self.task, result)
        meas = result.measurement

        self.assertIsInstance(meas, Measurement)
        self.assertEqual(meas.metric_name, self.metric)
        self.assertGreater(meas.quantity, 0.0 * u.second)
        self.assertLess(meas.quantity, 2 * DummyTask.taskLength * u.second)

    def testNoMetric(self):
        """An unresolvable metric name must raise TypeError."""
        self.config.connections.package = "foo.bar"
        self.config.connections.metric = "FooBarTime"
        task = TimingMetricTask(config=self.config)
        with self.assertRaises(TypeError):
            task.run(self.scienceTask.getFullMetadata())

    def testMissingData(self):
        """Missing metadata (None) yields a None measurement, not an error."""
        result = self.task.run(None)
        lsst.pipe.base.testUtils.assertValidOutput(self.task, result)
        meas = result.measurement
        self.assertIsNone(meas)

    def testRunDifferentMethod(self):
        """Timing a method that never ran yields a None measurement."""
        self.config.target = DummyTask._DefaultName + ".runDataRef"
        task = TimingMetricTask(config=self.config)
        result = task.run(self.scienceTask.getFullMetadata())
        lsst.pipe.base.testUtils.assertValidOutput(task, result)
        meas = result.measurement
        self.assertIsNone(meas)

    def testNonsenseKeys(self):
        """Metadata with end-time keys but no start-time keys must raise
        MetricComputationError.
        """
        metadata = self.scienceTask.getFullMetadata()
        startKeys = [key
                     for key in metadata.paramNames(topLevelOnly=False)
                     if "StartCpuTime" in key]
        for key in startKeys:
            metadata.remove(key)

        task = TimingMetricTask(config=self.config)
        with self.assertRaises(MetricComputationError):
            task.run(metadata)

    def testBadlyTypedKeys(self):
        """Timing keys of the wrong type (str instead of float) must raise
        MetricComputationError.
        """
        metadata = self.scienceTask.getFullMetadata()
        endKeys = [key
                   for key in metadata.paramNames(topLevelOnly=False)
                   if "EndCpuTime" in key]
        for key in endKeys:
            metadata.set(key, str(metadata.getAsDouble(key)))

        task = TimingMetricTask(config=self.config)
        with self.assertRaises(MetricComputationError):
            task.run(metadata)

    def testDeprecated(self):
        """Setting the old-style ``metric`` field must warn exactly once
        (FutureWarning) on validate and fill in the connection fields.
        """
        with warnings.catch_warnings(record=True):
            self.config.metric = "verify.DummyTime"
        self.config.connections.package = ""
        self.config.connections.metric = ""
        with warnings.catch_warnings(record=True) as emitted:
            self.config.validate()
            self.assertEqual(len(emitted), 1)
            self.assertEqual(emitted[0].category, FutureWarning)
        self.assertEqual(self.config.connections.package, "verify")
        self.assertEqual(self.config.connections.metric, "DummyTime")
class MemoryMetricTestSuite(MetadataMetricTestCase):
    """Tests of `MemoryMetricTask` run against metadata from `DummyTask`."""

    @classmethod
    def makeTask(cls):
        """Create the task under test (hook required by the base class)."""
        return MemoryMetricTask(config=cls._standardConfig())

    @staticmethod
    def _standardConfig():
        """Return a config measuring ``DummyTask.run`` as ``verify.DummyMemory``."""
        config = MemoryMetricTask.ConfigClass()
        config.connections.labelName = DummyTask._DefaultName
        config.target = DummyTask._DefaultName + ".run"
        config.connections.package = "verify"
        config.connections.metric = "DummyMemory"
        return config

    def setUp(self):
        super().setUp()
        self.config = self._standardConfig()
        self.metric = Name("verify.DummyMemory")

        # Run the instrumented task once so its metadata contains usage info.
        self.scienceTask = DummyTask()
        self.scienceTask.run()

    def testValid(self):
        """A normal run yields a positive memory measurement."""
        result = self.task.run(self.scienceTask.getFullMetadata())
        lsst.pipe.base.testUtils.assertValidOutput(self.task, result)
        meas = result.measurement

        self.assertIsInstance(meas, Measurement)
        self.assertEqual(meas.metric_name, self.metric)
        self.assertGreater(meas.quantity, 0.0 * u.byte)

    def testNoMetric(self):
        """An unresolvable metric name must raise TypeError."""
        self.config.connections.package = "foo.bar"
        self.config.connections.metric = "FooBarMemory"
        task = MemoryMetricTask(config=self.config)
        with self.assertRaises(TypeError):
            task.run(self.scienceTask.getFullMetadata())

    def testMissingData(self):
        """Missing metadata (None) yields a None measurement, not an error."""
        result = self.task.run(None)
        lsst.pipe.base.testUtils.assertValidOutput(self.task, result)
        meas = result.measurement
        self.assertIsNone(meas)

    def testRunDifferentMethod(self):
        """Measuring a method that never ran yields a None measurement."""
        self.config.target = DummyTask._DefaultName + ".runDataRef"
        task = MemoryMetricTask(config=self.config)
        result = task.run(self.scienceTask.getFullMetadata())
        lsst.pipe.base.testUtils.assertValidOutput(task, result)
        meas = result.measurement
        self.assertIsNone(meas)

    def testBadlyTypedKeys(self):
        """Memory keys of the wrong type (str instead of float) must raise
        MetricComputationError.
        """
        metadata = self.scienceTask.getFullMetadata()
        endKeys = [key
                   for key in metadata.paramNames(topLevelOnly=False)
                   if "EndMaxResidentSetSize" in key]
        for key in endKeys:
            metadata.set(key, str(metadata.getAsDouble(key)))

        task = MemoryMetricTask(config=self.config)
        with self.assertRaises(MetricComputationError):
            task.run(metadata)

    def testDeprecated(self):
        """Setting the old-style ``metric`` field must warn exactly once
        (FutureWarning) on validate and fill in the connection fields.
        """
        with warnings.catch_warnings(record=True):
            self.config.metric = "verify.DummyMemory"
        self.config.connections.package = ""
        self.config.connections.metric = ""
        with warnings.catch_warnings(record=True) as emitted:
            self.config.validate()
            self.assertEqual(len(emitted), 1)
            self.assertEqual(emitted[0].category, FutureWarning)
        self.assertEqual(self.config.connections.package, "verify")
        self.assertEqual(self.config.connections.metric, "DummyMemory")
# Hack around unittest's hacky test setup system: remove the imported
# abstract base suites so test discovery does not also run them directly.
del MetricTaskTestCase
del MetadataMetricTestCase
class MemoryTester(lsst.utils.tests.MemoryTestCase):
    """Concrete subclass enabling the standard LSST leak checks here."""
def setup_module(module):
    """Initialize the LSST test framework (pytest module-setup hook)."""
    lsst.utils.tests.init()
# Coverage-report annotation text that had been fused onto the `if` line
# has been removed; this is the standard LSST test entry point.
if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()