#
# LSST Data Management System
# Copyright 2008, 2009, 2010 LSST Corporation.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
import unittest

import numpy as np

import lsst.utils.tests
import lsst.pex.exceptions
import lsst.geom
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDetect
import lsst.afw.table as afwTable


def insertPsf(pos, im, psf, kernelSize, flux):
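    """Add a PSF image, scaled by ``flux``, into ``im`` at each position in ``pos``."""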
    for x, y in pos:
        x0 = x - kernelSize//2
        y0 = y - kernelSize//2
        tmpbox = lsst.geom.Box2I(lsst.geom.Point2I(x0, y0),
                                 lsst.geom.Extent2I(kernelSize, kernelSize))
        tmp = psf.computeImage(lsst.geom.Point2D(x0, y0))
        tmp *= flux
        im.image[tmpbox, afwImage.LOCAL] += tmp


def mergeCatalogs(catList, names, peakDist, idFactory, indivNames=[], samePeakDist=-1.):
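    """Merge the SourceCatalogs in ``catList`` with afwDetect.FootprintMergeList.

    Returns the merged source catalog, the number of merged objects, and the
    total number of peaks over all merged footprints.
    """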
    schema = afwTable.SourceTable.makeMinimalSchema()
    merged = afwDetect.FootprintMergeList(schema, names)

    if not indivNames:
        indivNames = names

    # Count the number of objects and peaks in this list
    mergedList = merged.getMergedSourceCatalog(catList, indivNames, peakDist,
                                               schema, idFactory, samePeakDist)
    nob = len(mergedList)
    npeaks = sum([len(ob.getFootprint().getPeaks()) for ob in mergedList])

    return mergedList, nob, npeaks


def isPeakInCatalog(peak, catalog):
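    """Return True if ``catalog`` contains a peak matching ``peak`` (compared via ``getI()``)."""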
    for record in catalog:
        for p in record.getFootprint().getPeaks():
            if p.getI() == peak.getI():
                return True
    return False


class FootprintMergeCatalogTestCase(lsst.utils.tests.TestCase):
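    """Tests of merging multiple footprint catalogs with afwDetect.FootprintMergeList."""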

    def setUp(self):
        """Build up three different sets of objects that are to be merged"""
        pos1 = [(40, 40), (220, 35), (40, 48), (220, 50),
                (67, 67), (150, 50), (40, 90), (70, 160),
                (35, 255), (70, 180), (250, 200), (120, 120),
                (170, 180), (100, 210), (20, 210),
                ]
        pos2 = [(43, 45), (215, 31), (171, 258), (211, 117),
                (48, 99), (70, 160), (125, 45), (251, 33),
                (37, 170), (134, 191), (79, 223), (258, 182)
                ]
        pos3 = [(70, 170), (219, 41), (253, 173), (253, 192)]

        box = lsst.geom.Box2I(lsst.geom.Point2I(0, 0), lsst.geom.Point2I(300, 300))
        psfsig = 1.
        kernelSize = 41
        flux = 1000

        # Create a different sized psf for each image and insert them at the
        # desired positions
        im1 = afwImage.MaskedImageD(box)
        psf1 = afwDetect.GaussianPsf(kernelSize, kernelSize, psfsig)

        im2 = afwImage.MaskedImageD(box)
        psf2 = afwDetect.GaussianPsf(kernelSize, kernelSize, 2*psfsig)

        im3 = afwImage.MaskedImageD(box)
        psf3 = afwDetect.GaussianPsf(kernelSize, kernelSize, 1.3*psfsig)

        insertPsf(pos1, im1, psf1, kernelSize, flux)
        insertPsf(pos2, im2, psf2, kernelSize, flux)
        insertPsf(pos3, im3, psf3, kernelSize, flux)

        schema = afwTable.SourceTable.makeMinimalSchema()
        self.idFactory = afwTable.IdFactory.makeSimple()
        self.table = afwTable.SourceTable.make(schema, self.idFactory)

        # Create SourceCatalogs from these objects
        fp1 = afwDetect.FootprintSet(
            im1, afwDetect.Threshold(0.001), "DETECTED")
        self.catalog1 = afwTable.SourceCatalog(self.table)
        fp1.makeSources(self.catalog1)

        fp2 = afwDetect.FootprintSet(
            im2, afwDetect.Threshold(0.001), "DETECTED")
        self.catalog2 = afwTable.SourceCatalog(self.table)
        fp2.makeSources(self.catalog2)

        fp3 = afwDetect.FootprintSet(
            im3, afwDetect.Threshold(0.001), "DETECTED")
        self.catalog3 = afwTable.SourceCatalog(self.table)
        fp3.makeSources(self.catalog3)

    def tearDown(self):
        del self.catalog1
        del self.catalog2
        del self.catalog3
        del self.table

    def testMerge1(self):
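        """Merge the test catalogs in several combinations and check object
        counts, peak counts, footprint areas, and merge flags.
        """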
        # Add the first catalog only
        merge, nob, npeak = mergeCatalogs([self.catalog1], ["1"], [-1],
                                          self.idFactory)
        self.assertEqual(nob, 14)
        self.assertEqual(npeak, 15)

        for record in merge:
            self.assertTrue(record.get("merge_footprint_1"))
            for peak in record.getFootprint().getPeaks():
                self.assertTrue(peak.get("merge_peak_1"))

        # area for each object
        pixArea = np.empty(14)
        pixArea.fill(69)
        pixArea[1] = 135
        measArea = [i.getFootprint().getArea() for i in merge]
        np.testing.assert_array_equal(pixArea, measArea)

        # Add the first catalog and second catalog with the wrong name, which
        # should result in an exception being raised
        with self.assertRaises(lsst.pex.exceptions.LogicError):
            mergeCatalogs([self.catalog1, self.catalog2], ["1", "2"],
                          [0, 0], self.idFactory, ["1", "3"])

        # Add the first catalog and second catalog with the wrong number of
        # peakDist elements, which should raise an exception
        with self.assertRaises(ValueError):
            mergeCatalogs([self.catalog1, self.catalog2], ["1", "2"],
                          [0], self.idFactory, ["1", "3"])

        # Add the first catalog and second catalog with the wrong number of
        # filters, which should raise an exception
        with self.assertRaises(ValueError):
            mergeCatalogs([self.catalog1, self.catalog2], ["1"],
                          [0], self.idFactory, ["1", "3"])

        # Add the first catalog and second catalog with minPeak < 1 so it will
        # not add new peaks
        merge, nob, npeak = mergeCatalogs([self.catalog1, self.catalog2],
                                          ["1", "2"], [0, -1],
                                          self.idFactory)
        self.assertEqual(nob, 22)
        self.assertEqual(npeak, 23)
        # area for each object
        pixArea = np.ones(22)
        pixArea[0] = 275
        pixArea[1] = 270
        pixArea[2:5].fill(69)
        pixArea[5] = 323
        pixArea[6] = 69
        pixArea[7] = 261
        pixArea[8:14].fill(69)
        pixArea[14:22].fill(261)
        measArea = [i.getFootprint().getArea() for i in merge]
        np.testing.assert_array_equal(pixArea, measArea)

        for record in merge:
            for peak in record.getFootprint().getPeaks():
                # Should only get peaks from catalog2 if catalog1 didn't
                # contribute to the footprint
                if record.get("merge_footprint_1"):
                    self.assertTrue(peak.get("merge_peak_1"))
                    self.assertFalse(peak.get("merge_peak_2"))
                else:
                    self.assertFalse(peak.get("merge_peak_1"))
                    self.assertTrue(peak.get("merge_peak_2"))

        # Same as previous with another catalog
        merge, nob, npeak = mergeCatalogs([self.catalog1, self.catalog2, self.catalog3],
                                          ["1", "2", "3"], [0, -1, -1],
                                          self.idFactory)
        self.assertEqual(nob, 19)
        self.assertEqual(npeak, 20)
        pixArea = np.ones(19)
        pixArea[0] = 416
        pixArea[1] = 270
        pixArea[2:4].fill(69)
        pixArea[4] = 323
        pixArea[5] = 69
        pixArea[6] = 406
        pixArea[7] = 69
        pixArea[8] = 493
        pixArea[9:13].fill(69)
        pixArea[12:19].fill(261)
        measArea = [i.getFootprint().getArea() for i in merge]
        np.testing.assert_array_equal(pixArea, measArea)

        for record in merge:
            for peak in record.getFootprint().getPeaks():
                # Should only get peaks from catalog2 if catalog1 didn't
                # contribute to the footprint
                if record.get("merge_footprint_1"):
                    self.assertTrue(peak.get("merge_peak_1"))
                    self.assertFalse(peak.get("merge_peak_2"))
                else:
                    self.assertFalse(peak.get("merge_peak_1"))
                    self.assertTrue(peak.get("merge_peak_2"))

        # Add all the catalogs with minPeak = 0 so all peaks will be added
        merge, nob, npeak = mergeCatalogs([self.catalog1, self.catalog2, self.catalog3],
                                          ["1", "2", "3"], [0, 0, 0],
                                          self.idFactory)
        self.assertEqual(nob, 19)
        self.assertEqual(npeak, 30)
        measArea = [i.getFootprint().getArea() for i in merge]
        np.testing.assert_array_equal(pixArea, measArea)

        # Add all the catalogs with minPeak = 10 so some peaks will be added to
        # the footprint
        merge, nob, npeak = mergeCatalogs([self.catalog1, self.catalog2, self.catalog3],
                                          ["1", "2", "3"], 10, self.idFactory)
        self.assertEqual(nob, 19)
        self.assertEqual(npeak, 25)
        measArea = [i.getFootprint().getArea() for i in merge]
        np.testing.assert_array_equal(pixArea, measArea)

        for record in merge:
            for peak in record.getFootprint().getPeaks():
                if peak.get("merge_peak_1"):
                    self.assertTrue(record.get("merge_footprint_1"))
                    self.assertTrue(isPeakInCatalog(peak, self.catalog1))
                elif peak.get("merge_peak_2"):
                    self.assertTrue(record.get("merge_footprint_2"))
                    self.assertTrue(isPeakInCatalog(peak, self.catalog2))
                elif peak.get("merge_peak_3"):
                    self.assertTrue(record.get("merge_footprint_3"))
                    self.assertTrue(isPeakInCatalog(peak, self.catalog3))
                else:
                    self.fail("At least one merge_peak flag must be set")

        # Add all the catalogs with minPeak = 100 so no new peaks will be added
        merge, nob, npeak = mergeCatalogs([self.catalog1, self.catalog2, self.catalog3],
                                          ["1", "2", "3"], 100, self.idFactory)
        self.assertEqual(nob, 19)
        self.assertEqual(npeak, 20)
        measArea = [i.getFootprint().getArea() for i in merge]
        np.testing.assert_array_equal(pixArea, measArea)

        for record in merge:
            for peak in record.getFootprint().getPeaks():
                if peak.get("merge_peak_1"):
                    self.assertTrue(record.get("merge_footprint_1"))
                    self.assertTrue(isPeakInCatalog(peak, self.catalog1))
                elif peak.get("merge_peak_2"):
                    self.assertTrue(record.get("merge_footprint_2"))
                    self.assertTrue(isPeakInCatalog(peak, self.catalog2))
                elif peak.get("merge_peak_3"):
                    self.assertTrue(record.get("merge_footprint_3"))
                    self.assertTrue(isPeakInCatalog(peak, self.catalog3))
                else:
                    self.fail("At least one merge_peak flag must be set")

        # Add footprints with large samePeakDist so that any footprint that merges will also
        # have the peak flagged
        merge, nob, npeak = mergeCatalogs([self.catalog1, self.catalog2, self.catalog3],
                                          ["1", "2", "3"], 100, self.idFactory, samePeakDist=40)

        # peaks detected in more than one catalog
        multiPeakIndex = [0, 2, 5, 7, 9]
        peakIndex = 0
        for record in merge:
            for peak in record.getFootprint().getPeaks():
                numPeak = np.sum([peak.get("merge_peak_1"), peak.get("merge_peak_2"),
                                  peak.get("merge_peak_3")])
                if peakIndex in multiPeakIndex:
                    self.assertGreater(numPeak, 1)
                else:
                    self.assertEqual(numPeak, 1)
                peakIndex += 1

    def testDM17431(self):
        """Test that priority order is respected.

        Specifically, test the case in which lower priority footprints
        overlap two previously disconnected higher priority footprints.
        """
        box = lsst.geom.Box2I(lsst.geom.Point2I(0, 0), lsst.geom.Point2I(100, 100))
        psfsig = 1.
        kernelSize = 41
        flux = 1000
        cat1 = {}
        cat2 = {}
        peakDist = 10
        samePeakDist = 3

        # cat2 connects cat1's 2 separated footprints.
        # 65 and 70 are too close to be new or same peaks.
        # 70 is higher priority and should be in the catalog instead of 65.
        cat1['pos'] = [(50, 50), (70, 50)]
        cat2['pos'] = [(55, 50), (65, 50)]
        schema = afwTable.SourceTable.makeMinimalSchema()
        idFactory = afwTable.IdFactory.makeSimple()
        table = afwTable.SourceTable.make(schema, idFactory)

        for (cat, psfFactor) in zip([cat1, cat2], [1, 2]):
            cat['mi'] = afwImage.MaskedImageD(box)
            cat['psf'] = afwDetect.GaussianPsf(kernelSize, kernelSize, psfFactor*psfsig)
            insertPsf(cat['pos'], cat['mi'], cat['psf'], kernelSize, flux)
            fp = afwDetect.FootprintSet(cat['mi'], afwDetect.Threshold(0.001), "DETECTED")
            cat['catalog'] = afwTable.SourceCatalog(table)
            fp.makeSources(cat['catalog'])

        merge, nob, npeak = mergeCatalogs([cat1['catalog'], cat2['catalog']], ["1", "2"],
                                          peakDist, idFactory, samePeakDist=samePeakDist)

        # Check that both higher-priority cat1 records survive peak merging
        for record in cat1['catalog']:
            for peak in record.getFootprint().getPeaks():
                self.assertTrue(isPeakInCatalog(peak, merge))
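

# MemoryTestCase (from lsst.utils.tests) checks for leaked objects after the
# other tests in this module have run.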
class MemoryTester(lsst.utils.tests.MemoryTestCase):
    pass


def setup_module(module):
    lsst.utils.tests.init()


if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()