Coverage for python/lsst/scarlet/lite/detect.py: 47%
49 statements
coverage.py v7.5.1, created at 2024-05-16 02:46 -0700
# This file is part of scarlet_lite.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from __future__ import annotations

import logging
from typing import Sequence, cast

import numpy as np
from lsst.scarlet.lite.detect_pybind11 import Footprint  # type: ignore

from .bbox import Box
from .image import Image
from .utils import continue_class
from .wavelet import get_multiresolution_support, starlet_transform

logger = logging.getLogger("scarlet.detect")


def bounds_to_bbox(bounds: tuple[int, int, int, int]) -> Box:
    """Convert the bounds of a Footprint into a Box

    Notes
    -----
    Unlike slices, the bounds are _inclusive_ of the end points.

    Parameters
    ----------
    bounds:
        The bounds of the `Footprint` as a `tuple` of
        ``(bottom, top, left, right)``.

    Returns
    -------
    result:
        The `Box` created from the bounds
    """
    return Box(
        (bounds[1] + 1 - bounds[0], bounds[3] + 1 - bounds[2]),
        origin=(bounds[0], bounds[2]),
    )
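
# A minimal usage sketch (not part of the original module); attribute names on
# `Box` are assumed from how they are used elsewhere in this file. Because the
# bounds are inclusive, (bottom, top, left, right) = (2, 4, 3, 7) spans 3 rows
# and 5 columns with its origin at the lower-left corner:
#
#     >>> bbox = bounds_to_bbox((2, 4, 3, 7))
#     >>> bbox.shape, bbox.origin
#     ((3, 5), (2, 3))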


@continue_class
class Footprint:  # type: ignore # noqa
    @property
    def bbox(self) -> Box:
        """Bounding box for the Footprint

        Returns
        -------
        bbox:
            The minimal `Box` that contains the entire `Footprint`.
        """
        return bounds_to_bbox(self.bounds)  # type: ignore

    @property
    def yx0(self) -> tuple[int, int]:
        """Origin in y, x of the lower left corner of the footprint"""
        return self.bounds[0], self.bounds[2]  # type: ignore

    def intersection(self, other: Footprint) -> Image | None:
        """The intersection of two footprints

        Parameters
        ----------
        other:
            The other footprint to compare.

        Returns
        -------
        intersection:
            The intersection of two footprints.
        """
        footprint1 = Image(self.data, yx0=self.yx0)  # type: ignore
        footprint2 = Image(other.data, yx0=other.yx0)  # type: ignore # noqa
        return footprint1 & footprint2

    def union(self, other: Footprint) -> Image | None:
        """The union of two footprints

        Parameters
        ----------
        other:
            The other footprint to compare.

        Returns
        -------
        union:
            The union of two footprints.
        """
        footprint1 = Image(self.data, yx0=self.yx0)  # type: ignore
        footprint2 = Image(other.data, yx0=other.yx0)
        return footprint1 | footprint2
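
# A minimal usage sketch (not part of the original module); `fp1` and `fp2` are
# hypothetical `Footprint` objects produced by the pybind11 detection code.
# Both methods combine the two footprints as `Image` masks:
#
#     >>> overlap = fp1.intersection(fp2)   # pixels present in both footprints
#     >>> combined = fp1.union(fp2)         # pixels present in either footprint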


def footprints_to_image(footprints: Sequence[Footprint], shape: tuple[int, int]) -> Image:
    """Convert a set of scarlet footprints to a pixelized image.

    Parameters
    ----------
    footprints:
        The footprints to convert into an image.
    shape:
        The shape of the image that is created from the footprints.

    Returns
    -------
    result:
        The image created from the footprints.
    """
    result = Image.from_box(Box(shape), dtype=int)
    for k, footprint in enumerate(footprints):
        bbox = bounds_to_bbox(footprint.bounds)
        fp_image = Image(footprint.data, yx0=cast(tuple[int, int], bbox.origin))
        result = result + fp_image * (k + 1)
    return result
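
# A minimal usage sketch (not part of the original module); `footprints` is a
# hypothetical sequence of `Footprint` objects. Each footprint is painted into
# a single integer image with the k-th footprint scaled by k + 1, so the result
# behaves like a segmentation map (0 is background):
#
#     >>> seg = footprints_to_image(footprints, shape=(100, 100))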


def get_wavelets(images: np.ndarray, variance: np.ndarray, scales: int | None = None) -> np.ndarray:
    """Calculate wavelet coefficients given a set of images and their variances

    Parameters
    ----------
    images:
        The array of images with shape `(bands, Ny, Nx)` for which to
        calculate wavelet coefficients.
    variance:
        An array of variances with the same shape as `images`.
    scales:
        The maximum number of wavelet scales to use.

    Returns
    -------
    coeffs:
        The array of coefficients with shape `(scales+1, bands, Ny, Nx)`.
        Note that the result has `scales+1` total arrays,
        since the last set of coefficients is the image of all
        flux with frequency greater than the last wavelet scale.
    """
    sigma = np.median(np.sqrt(variance), axis=(1, 2))
    # Create the wavelet coefficients for the significant pixels
    coeffs = []
    for b, image in enumerate(images):
        _coeffs = starlet_transform(image, scales=scales)
        support = get_multiresolution_support(
            image=image,
            starlets=_coeffs,
            sigma=sigma[b],
            sigma_scaling=3,
            epsilon=1e-1,
            max_iter=20,
        )
        coeffs.append((support * _coeffs).astype(images.dtype))
    return np.array(coeffs)
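
# A minimal usage sketch (not part of the original module); the array shapes
# and contents are hypothetical. Each band is transformed independently and
# thresholded against its own median noise level:
#
#     >>> images = np.zeros((3, 128, 128), dtype=np.float32)   # (bands, Ny, Nx)
#     >>> variance = np.ones_like(images)
#     >>> coeffs = get_wavelets(images, variance, scales=4)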


def get_detect_wavelets(images: np.ndarray, variance: np.ndarray, scales: int = 3) -> np.ndarray:
    """Get an array of wavelet coefficients to use for detection

    Parameters
    ----------
    images:
        The array of images with shape `(bands, Ny, Nx)` for which to
        calculate wavelet coefficients.
    variance:
        An array of variances with the same shape as `images`.
    scales:
        The maximum number of wavelet scales to use.
        Note that the result will have `scales+1` total arrays,
        where the last set of coefficients is the image of all
        flux with frequency greater than the last wavelet scale.

    Returns
    -------
    starlets:
        The array of wavelet coefficients for pixels with significant
        amplitude in each scale.
    """
    sigma = np.median(np.sqrt(variance))
    # Create the wavelet coefficients for the significant pixels
    detect = np.sum(images, axis=0)
    _coeffs = starlet_transform(detect, scales=scales)
    support = get_multiresolution_support(
        image=detect,
        starlets=_coeffs,
        sigma=sigma,  # type: ignore
        sigma_scaling=3,
        epsilon=1e-1,
        max_iter=20,
    )
    return (support * _coeffs).astype(images.dtype)
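
# A minimal usage sketch (not part of the original module); the array shapes
# and contents are hypothetical. The bands are summed into a single detection
# image before the starlet transform, so the result has `scales + 1` planes:
#
#     >>> images = np.zeros((3, 128, 128), dtype=np.float32)   # (bands, Ny, Nx)
#     >>> variance = np.ones_like(images)
#     >>> starlets = get_detect_wavelets(images, variance, scales=3)
#     >>> starlets.shape
#     (4, 128, 128)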