Coverage for tests/test_deblend.py: 19% (96 statements)
# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

import unittest

import numpy as np

from lsst.geom import Point2I
import lsst.utils.tests
import lsst.afw.image as afwImage
from lsst.meas.algorithms import SourceDetectionTask
from lsst.meas.extensions.scarlet import ScarletDeblendTask
from lsst.afw.table import SourceCatalog
from lsst.afw.detection import Footprint
from lsst.afw.detection.multiband import heavyFootprintToImage
from lsst.afw.geom import SpanSet, Stencil

from utils import initData


class TestDeblend(lsst.utils.tests.TestCase):
    def test_deblend_task(self):
        # Set the random seed so that the noise field is unaffected
        np.random.seed(0)
        shape = (5, 100, 115)
        coords = [
            # blend
            (15, 25), (10, 30), (17, 38),
            # isolated source
            (85, 90),
        ]
        amplitudes = [
            # blend
            80, 60, 90,
            # isolated source
            20,
        ]
        result = initData(shape, coords, amplitudes)
        targetPsfImage, psfImages, images, channels, seds, morphs, targetPsf, psfs = result
        B, Ny, Nx = shape

        # Add some noise, otherwise the task will blow up due to
        # zero variance
        noise = 10*(np.random.rand(*images.shape).astype(np.float32)-.5)
        images += noise
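
        # Build a multiband exposure from the simulated images; the squared
        # noise realization stands in for a per-pixel variance plane, which
        # only needs to be nonzero for the deblender to run.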
        filters = "grizy"
        _images = afwImage.MultibandMaskedImage.fromArrays(filters, images.astype(np.float32), None, noise**2)
        coadds = [afwImage.Exposure(img, dtype=img.image.array.dtype) for img in _images]
        coadds = afwImage.MultibandExposure.fromExposures(filters, coadds)
        for b, coadd in enumerate(coadds):
            coadd.setPsf(psfs[b])

        schema = SourceCatalog.Table.makeMinimalSchema()

        detectionTask = SourceDetectionTask(schema=schema)

        # Adjust config options to test skipping parents
        config = ScarletDeblendTask.ConfigClass()
        config.maxIter = 100
        config.maxFootprintArea = 1000
        config.maxNumberOfPeaks = 4
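        # These limits are deliberately small so that the oversized and
        # overly dense parent footprints added below are skipped and flagged
        # rather than deblended.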
        deblendTask = ScarletDeblendTask(schema=schema, config=config)

        table = SourceCatalog.Table.make(schema)
        detectionResult = detectionTask.run(table, coadds["r"])
        catalog = detectionResult.sources

        # Add a footprint that is too large
        src = catalog.addNew()
        halfLength = int(np.ceil(np.sqrt(config.maxFootprintArea) + 1))
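        # A BOX stencil with this half-length spans (2*halfLength + 1)**2
        # pixels (67x67 here), well above config.maxFootprintArea, so this
        # parent should be flagged as too big and skipped.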
        ss = SpanSet.fromShape(halfLength, Stencil.BOX, offset=(50, 50))
        bigfoot = Footprint(ss)
        bigfoot.addPeak(50, 50, 100)
        src.setFootprint(bigfoot)

        # Add a footprint with too many peaks
        src = catalog.addNew()
        ss = SpanSet.fromShape(10, Stencil.BOX, offset=(75, 20))
        denseFoot = Footprint(ss)
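        # One more peak than config.maxNumberOfPeaks allows, so this parent
        # should be flagged as having too many peaks and skipped.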
        for n in range(config.maxNumberOfPeaks+1):
            denseFoot.addPeak(70+2*n, 15+2*n, 10*n)
        src.setFootprint(denseFoot)

        # Run the deblender
        result, flux = deblendTask.run(coadds, catalog)
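        # The deblender returns per-band outputs: `result` maps each band
        # name to its deblended catalog, and `flux` is expected to have one
        # entry per band as well.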

        # Make sure that the catalogs have the same sources in all bands,
        # and check that band-independent columns are equal
        bandIndependentColumns = [
            "id",
            "parent",
            "deblend_nPeaks",
            "deblend_nChild",
            "deblend_peak_center_x",
            "deblend_peak_center_y",
            "deblend_runtime",
            "deblend_iterations",
            "deblend_logL",
            "deblend_spectrumInitFlag",
            "deblend_blendConvergenceFailedFlag",
        ]
        self.assertEqual(len(filters), len(result))
        self.assertEqual(len(filters), len(flux))
        ref = result[filters[0]]
        for f in filters[1:]:
            for col in bandIndependentColumns:
                np.testing.assert_array_equal(result[f][col], ref[col])

        # Check that other columns are consistent
        for f, _catalog in result.items():
            parents = _catalog[_catalog["parent"] == 0]
            # Check that the number of deblended children is consistent
            self.assertEqual(np.sum(_catalog["deblend_nChild"]), len(_catalog)-len(parents))

            for parent in parents:
                children = _catalog[_catalog["parent"] == parent.get("id")]
                # Check that nChild is set correctly
                self.assertEqual(len(children), parent.get("deblend_nChild"))
                # Check that parent columns are propagated to their children
                for parentCol, childCol in config.columnInheritance.items():
                    np.testing.assert_array_equal(parent.get(parentCol), children[childCol])

            children = _catalog[_catalog["parent"] != 0]
            for child in children:
                fp = child.getFootprint()
                img = heavyFootprintToImage(fp)
                # Check that the flux at the center is correct.
                # Note: this only works in this test image because the
                # detected peak is in the same location as the scarlet peak.
                # If the peak is shifted, the flux value will still be correct,
                # but deblend_peak_center will not be the correct location.
                px = child.get("deblend_peak_center_x")
                py = child.get("deblend_peak_center_y")
                flux = img.image[Point2I(px, py)]
                self.assertEqual(flux, child.get("deblend_peak_instFlux"))

                # Check that the peak positions match the catalog entry
                peaks = fp.getPeaks()
                self.assertEqual(px, peaks[0].getIx())
                self.assertEqual(py, peaks[0].getIy())

            # Check that all sources have the correct number of peaks
            for src in _catalog:
                fp = src.getFootprint()
                self.assertEqual(len(fp.peaks), src.get("deblend_nPeaks"))
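
            # The two parents found by detection (the blended group and the
            # isolated source) come first in the catalog, so the oversized
            # footprint added above lands in row 2 and the dense footprint
            # in row 3.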
            # Check that only the large footprint was flagged as too big
            largeFootprint = np.zeros(len(_catalog), dtype=bool)
            largeFootprint[2] = True
            np.testing.assert_array_equal(largeFootprint, _catalog["deblend_parentTooBig"])

            # Check that only the dense footprint was flagged as too dense
            denseFootprint = np.zeros(len(_catalog), dtype=bool)
            denseFootprint[3] = True
            np.testing.assert_array_equal(denseFootprint, _catalog["deblend_tooManyPeaks"])

            # Check that only the appropriate parents were skipped
            skipped = largeFootprint | denseFootprint
            np.testing.assert_array_equal(skipped, _catalog["deblend_skipped"])


class MemoryTester(lsst.utils.tests.MemoryTestCase):
    pass


def setup_module(module):
    lsst.utils.tests.init()


if __name__ == "__main__":
    lsst.utils.tests.init()
    unittest.main()