Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of meas_extensions_scarlet. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22from functools import partial 

23 

24import numpy as np 

25import scarlet 

26from scarlet.psf import PSF, gaussian 

27from scarlet import Blend, Frame, Observation 

28from scarlet_extensions.initialization.source import initAllSources 

29 

30import lsst.log 

31import lsst.pex.config as pexConfig 

32from lsst.pex.exceptions import InvalidParameterError 

33import lsst.pipe.base as pipeBase 

34from lsst.geom import Point2I, Box2I, Point2D 

35import lsst.afw.math as afwMath 

36import lsst.afw.geom as afwGeom 

37import lsst.afw.geom.ellipses as afwEll 

38import lsst.afw.image.utils 

39import lsst.afw.image as afwImage 

40import lsst.afw.detection as afwDet 

41import lsst.afw.table as afwTable 

42 

43from .source import modelToHeavy 

44 

45__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"] 

46 

47logger = lsst.log.Log.getLogger("meas.deblender.deblend") 

48 

49 

50class IncompleteDataError(Exception): 

51 """The PSF could not be computed due to incomplete data 

52 """ 

53 pass 

54 

55 

56class ScarletGradientError(Exception): 

57 """An error occurred during optimization 

58 

59 This error occurs when the optimizer encounters 

60 a NaN value while calculating the gradient. 

61 """ 

62 def __init__(self, iterations, sources): 

63 self.iterations = iterations 

64 self.sources = sources 

65 msg = ("ScalarGradientError in iteration {0}. " 

66 "NaN values introduced in sources {1}") 

67 self.message = msg.format(iterations, sources) 

68 

69 def __str__(self): 

70 return self.message 

71 

72 

73def _checkBlendConvergence(blend, f_rel): 

74 """Check whether or not a blend has converged 

75 """ 

76 deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1]) 

77 convergence = f_rel * np.abs(blend.loss[-1]) 

78 return deltaLoss < convergence 

79 

80 

81def _getPsfFwhm(psf): 

82 """Calculate the FWHM of the `psf` 

83 """ 

84 return psf.computeShape().getDeterminantRadius() * 2.35 

85 

86 

87def _estimateRMS(exposure, statsMask): 

88 """Estimate the standard dev. of an image 

89 

90 Calculate the RMS of the `exposure`. 

91 """ 

92 mi = exposure.getMaskedImage() 

93 statsCtrl = afwMath.StatisticsControl() 

94 statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(statsMask)) 

95 stats = afwMath.makeStatistics(mi.variance, mi.mask, afwMath.STDEV | afwMath.MEAN, statsCtrl) 

96 rms = np.sqrt(stats.getValue(afwMath.MEAN)**2 + stats.getValue(afwMath.STDEV)**2) 

97 return rms 

98 

99 

100def _computePsfImage(self, position=None): 

101 """Get a multiband PSF image 

102 The PSF Kernel Image is computed for each band 

103 and combined into a (filter, y, x) array and stored 

104 as `self._psfImage`. 

105 The result is not cached, so if the same PSF is expected 

106 to be used multiple times it is a good idea to store the 

107 result in another variable. 

108 Note: this is a temporary fix during the deblender sprint. 

109 In the future this function will replace the current method 

110 in `afw.MultibandExposure.computePsfImage` (DM-19789). 

111 Parameters 

112 ---------- 

113 position : `Point2D` or `tuple` 

114 Coordinates to evaluate the PSF. If `position` is `None` 

115 then `Psf.getAveragePosition()` is used. 

116 Returns 

117 ------- 

118 self._psfImage: array 

119 The multiband PSF image. 

120 """ 

121 psfs = [] 

122 # Make the coordinates into a Point2D (if necessary) 

123 if not isinstance(position, Point2D) and position is not None: 

124 position = Point2D(position[0], position[1]) 

125 

126 for bidx, single in enumerate(self.singles): 

127 try: 

128 if position is None: 

129 psf = single.getPsf().computeImage() 

130 psfs.append(psf) 

131 else: 

132 psf = single.getPsf().computeImage(position) 

133 psfs.append(psf) 

134 except InvalidParameterError: 

135 # This band failed to compute the PSF due to incomplete data 

136 # at that location. This is unlikely to be a problem for Rubin, 

137 # however the edges of some HSC COSMOS fields contain incomplete 

138 # data in some bands, so we track this error to distinguish it 

139 # from unknown errors. 

140 msg = "Failed to compute PSF at {} in band {}" 

141 raise IncompleteDataError(msg.format(position, self.filters[bidx])) 

142 

143 left = np.min([psf.getBBox().getMinX() for psf in psfs]) 

144 bottom = np.min([psf.getBBox().getMinY() for psf in psfs]) 

145 right = np.max([psf.getBBox().getMaxX() for psf in psfs]) 

146 top = np.max([psf.getBBox().getMaxY() for psf in psfs]) 

147 bbox = Box2I(Point2I(left, bottom), Point2I(right, top)) 

148 psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs] 

149 psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs) 

150 return psfImage 

151 

152 

153def getFootprintMask(footprint, mExposure): 

154 """Mask pixels outside the footprint 

155 

156 Parameters 

157 ---------- 

158 mExposure : `lsst.image.MultibandExposure` 

159 - The multiband exposure containing the image, 

160 mask, and variance data 

161 footprint : `lsst.detection.Footprint` 

162 - The footprint of the parent to deblend 

163 

164 Returns 

165 ------- 

166 footprintMask : array 

167 Boolean array with pixels not in the footprint set to one. 

168 """ 

169 bbox = footprint.getBBox() 

170 fpMask = afwImage.Mask(bbox) 

171 footprint.spans.setMask(fpMask, 1) 

172 fpMask = ~fpMask.getArray().astype(bool) 

173 return fpMask 

174 

175 

176def deblend(mExposure, footprint, config): 

177 """Deblend a parent footprint 

178 

179 Parameters 

180 ---------- 

181 mExposure : `lsst.image.MultibandExposure` 

182 - The multiband exposure containing the image, 

183 mask, and variance data 

184 footprint : `lsst.detection.Footprint` 

185 - The footprint of the parent to deblend 

186 config : `ScarletDeblendConfig` 

187 - Configuration of the deblending task 

188 """ 

189 # Extract coordinates from each MultiColorPeak 

190 bbox = footprint.getBBox() 

191 

192 # Create the data array from the masked images 

193 images = mExposure.image[:, bbox].array 

194 

195 # Use the inverse variance as the weights 

196 if config.useWeights: 

197 weights = 1/mExposure.variance[:, bbox].array 

198 else: 

199 weights = np.ones_like(images) 

200 

201 # Mask out the pixels outside the footprint 

202 mask = getFootprintMask(footprint, mExposure) 

203 weights *= ~mask 

204 

205 psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32) 

206 

207 psfShape = (config.modelPsfSize, config.modelPsfSize) 

208 model_psf = PSF(partial(gaussian, sigma=config.modelPsfSigma), shape=(None,)+psfShape) 

209 

210 frame = Frame(images.shape, psfs=model_psf, channels=mExposure.filters) 

211 observation = Observation(images, psfs=psfs, weights=weights, channels=mExposure.filters) 

212 observation.match(frame) 

213 

214 assert(config.sourceModel in ["single", "double", "point", "fit"]) 

215 

216 # Set the appropriate number of components 

217 if config.sourceModel == "single": 

218 maxComponents = 1 

219 elif config.sourceModel == "double": 

220 maxComponents = 2 

221 elif config.sourceModel == "point": 

222 maxComponents = 0 

223 elif config.sourceModel == "fit": 

224 # It is likely in the future that there will be some heuristic 

225 # used to determine what type of model to use for each source, 

226 # but that has not yet been implemented (see DM-22551) 

227 raise NotImplementedError("sourceModel 'fit' has not been implemented yet") 

228 

229 # Convert the centers to pixel coordinates 

230 xmin = bbox.getMinX() 

231 ymin = bbox.getMinY() 

232 centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks] 

233 

234 # Only deblend sources that can be initialized 

235 sources, skipped = initAllSources( 

236 frame=frame, 

237 centers=centers, 

238 observation=observation, 

239 symmetric=config.symmetric, 

240 monotonic=config.monotonic, 

241 thresh=config.morphThresh, 

242 maxComponents=maxComponents, 

243 edgeDistance=config.edgeDistance, 

244 shifting=False, 

245 downgrade=config.downgrade, 

246 fallback=config.fallback, 

247 ) 

248 

249 # Attach the peak to all of the initialized sources 

250 for k, src in enumerate(sources): 

251 src.detectedPeak = footprint.peaks[k] 

252 

253 # Create the blend and attempt to optimize it 

254 blend = Blend(sources, observation) 

255 try: 

256 blend.fit(max_iter=config.maxIter, e_rel=config.relativeError) 

257 except ArithmeticError: 

258 # This occurs when a gradient update produces a NaN value 

259 # This is usually due to a source initialized with a 

260 # negative SED or no flux, often because the peak 

261 # is a noise fluctuation in one band and not a real source. 

262 iterations = len(blend.loss) 

263 failedSources = [] 

264 for k, src in enumerate(sources): 

265 if np.any(~np.isfinite(src.get_model())): 

266 failedSources.append(k) 

267 raise ScarletGradientError(iterations, failedSources) 

268 

269 return blend, skipped 

270 

271 

272class ScarletDeblendConfig(pexConfig.Config): 

273 """MultibandDeblendConfig 

274 

275 Configuration for the multiband deblender. 

276 The parameters are organized by the parameter types, which are 

277 - Stopping Criteria: Used to determine if the fit has converged 

278 - Position Fitting Criteria: Used to fit the positions of the peaks 

279 - Constraints: Used to apply constraints to the peaks and their components 

280 - Other: Parameters that don't fit into the above categories 

281 """ 

282 # Stopping Criteria 

283 maxIter = pexConfig.Field(dtype=int, default=300, 

284 doc=("Maximum number of iterations to deblend a single parent")) 

285 relativeError = pexConfig.Field(dtype=float, default=1e-4, 

286 doc=("Change in the loss function between" 

287 "iterations to exit fitter")) 

288 

289 # Blend Configuration options 

290 edgeDistance = pexConfig.Field(dtype=int, default=1, 

291 doc="All sources with flux within `edgeDistance` from the edge " 

292 "will be considered edge sources.") 

293 

294 # Constraints 

295 morphThresh = pexConfig.Field(dtype=float, default=1, 

296 doc="Fraction of background RMS a pixel must have" 

297 "to be included in the initial morphology") 

298 monotonic = pexConfig.Field(dtype=bool, default=True, doc="Make models monotonic") 

299 symmetric = pexConfig.Field(dtype=bool, default=False, doc="Make models symmetric") 

300 

301 # Other scarlet paremeters 

302 useWeights = pexConfig.Field( 

303 dtype=bool, default=True, 

304 doc=("Whether or not use use inverse variance weighting." 

305 "If `useWeights` is `False` then flat weights are used")) 

306 modelPsfSize = pexConfig.Field( 

307 dtype=int, default=11, 

308 doc="Model PSF side length in pixels") 

309 modelPsfSigma = pexConfig.Field( 

310 dtype=float, default=0.8, 

311 doc="Define sigma for the model frame PSF") 

312 saveTemplates = pexConfig.Field( 

313 dtype=bool, default=True, 

314 doc="Whether or not to save the SEDs and templates") 

315 processSingles = pexConfig.Field( 

316 dtype=bool, default=False, 

317 doc="Whether or not to process isolated sources in the deblender") 

318 sourceModel = pexConfig.Field( 

319 dtype=str, default="single", 

320 doc=("How to determine which model to use for sources, from\n" 

321 "- 'single': use a single component for all sources\n" 

322 "- 'double': use a bulge disk model for all sources\n" 

323 "- 'point: use a point-source model for all sources\n" 

324 "- 'fit: use a PSF fitting model to determine the number of components (not yet implemented)") 

325 ) 

326 downgrade = pexConfig.Field( 

327 dtype=bool, default=False, 

328 doc="Whether or not to downgrade the number of components for sources in small bounding boxes" 

329 ) 

330 

331 # Mask-plane restrictions 

332 badMask = pexConfig.ListField( 

333 dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT"], 

334 doc="Whether or not to process isolated sources in the deblender") 

335 statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"], 

336 doc="Mask planes to ignore when performing statistics") 

337 maskLimits = pexConfig.DictField( 

338 keytype=str, 

339 itemtype=float, 

340 default={}, 

341 doc=("Mask planes with the corresponding limit on the fraction of masked pixels. " 

342 "Sources violating this limit will not be deblended."), 

343 ) 

344 

345 # Size restrictions 

346 maxNumberOfPeaks = pexConfig.Field( 

347 dtype=int, default=0, 

348 doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent" 

349 " (<= 0: unlimited)")) 

350 maxFootprintArea = pexConfig.Field( 

351 dtype=int, default=1000000, 

352 doc=("Maximum area for footprints before they are ignored as large; " 

353 "non-positive means no threshold applied")) 

354 maxFootprintSize = pexConfig.Field( 

355 dtype=int, default=0, 

356 doc=("Maximum linear dimension for footprints before they are ignored " 

357 "as large; non-positive means no threshold applied")) 

358 minFootprintAxisRatio = pexConfig.Field( 

359 dtype=float, default=0.0, 

360 doc=("Minimum axis ratio for footprints before they are ignored " 

361 "as large; non-positive means no threshold applied")) 

362 

363 # Failure modes 

364 fallback = pexConfig.Field( 

365 dtype=bool, default=True, 

366 doc="Whether or not to fallback to a smaller number of components if a source does not initialize" 

367 ) 

368 notDeblendedMask = pexConfig.Field( 

369 dtype=str, default="NOT_DEBLENDED", optional=True, 

370 doc="Mask name for footprints not deblended, or None") 

371 catchFailures = pexConfig.Field( 

372 dtype=bool, default=False, 

373 doc=("If True, catch exceptions thrown by the deblender, log them, " 

374 "and set a flag on the parent, instead of letting them propagate up")) 

375 propagateAllPeaks = pexConfig.Field(dtype=bool, default=False, 

376 doc=('Guarantee that all peaks produce a child source.')) 

377 

378 

379class ScarletDeblendTask(pipeBase.Task): 

380 """ScarletDeblendTask 

381 

382 Split blended sources into individual sources. 

383 

384 This task has no return value; it only modifies the SourceCatalog in-place. 

385 """ 

386 ConfigClass = ScarletDeblendConfig 

387 _DefaultName = "scarletDeblend" 

388 

389 def __init__(self, schema, peakSchema=None, **kwargs): 

390 """Create the task, adding necessary fields to the given schema. 

391 

392 Parameters 

393 ---------- 

394 schema : `lsst.afw.table.schema.schema.Schema` 

395 Schema object for measurement fields; will be modified in-place. 

396 peakSchema : `lsst.afw.table.schema.schema.Schema` 

397 Schema of Footprint Peaks that will be passed to the deblender. 

398 Any fields beyond the PeakTable minimal schema will be transferred 

399 to the main source Schema. If None, no fields will be transferred 

400 from the Peaks. 

401 filters : list of str 

402 Names of the filters used for the eposures. This is needed to store 

403 the SED as a field 

404 **kwargs 

405 Passed to Task.__init__. 

406 """ 

407 pipeBase.Task.__init__(self, **kwargs) 

408 

409 peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema() 

410 if peakSchema is None: 

411 # In this case, the peakSchemaMapper will transfer nothing, but 

412 # we'll still have one 

413 # to simplify downstream code 

414 self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema) 

415 else: 

416 self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema) 

417 for item in peakSchema: 

418 if item.key not in peakMinimalSchema: 

419 self.peakSchemaMapper.addMapping(item.key, item.field) 

420 # Because SchemaMapper makes a copy of the output schema 

421 # you give its ctor, it isn't updating this Schema in 

422 # place. That's probably a design flaw, but in the 

423 # meantime, we'll keep that schema in sync with the 

424 # peakSchemaMapper.getOutputSchema() manually, by adding 

425 # the same fields to both. 

426 schema.addField(item.field) 

427 assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas" 

428 self._addSchemaKeys(schema) 

429 self.schema = schema 

430 

431 def _addSchemaKeys(self, schema): 

432 """Add deblender specific keys to the schema 

433 """ 

434 self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms') 

435 

436 self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge') 

437 

438 self.nChildKey = schema.addField('deblend_nChild', type=np.int32, 

439 doc='Number of children this object has (defaults to 0)') 

440 self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag', 

441 doc='Deblender thought this source looked like a PSF') 

442 self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag', 

443 doc='Source had too many peaks; ' 

444 'only the brightest were included') 

445 self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag', 

446 doc='Parent footprint covered too many pixels') 

447 self.maskedKey = schema.addField('deblend_masked', type='Flag', 

448 doc='Parent footprint was predominantly masked') 

449 self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag', 

450 doc='scarlet sed optimization did not converge before' 

451 'config.maxIter') 

452 self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag', 

453 doc='scarlet morph optimization did not converge before' 

454 'config.maxIter') 

455 self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag', 

456 type='Flag', 

457 doc='at least one source in the blend' 

458 'failed to converge') 

459 self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag', 

460 doc='Source had flux on the edge of the parent footprint') 

461 self.deblendFailedKey = schema.addField('deblend_failed', type='Flag', 

462 doc="Deblending failed on source") 

463 self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25, 

464 doc='Name of error if the blend failed') 

465 self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag', 

466 doc="Deblender skipped this source") 

467 self.modelCenter = afwTable.Point2DKey.addFields(schema, name="deblend_peak_center", 

468 doc="Center used to apply constraints in scarlet", 

469 unit="pixel") 

470 self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count', 

471 doc="The instFlux at the peak position of deblended mode") 

472 self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=20, 

473 doc="The type of model used, for example " 

474 "MultiExtendedSource, SingleExtendedSource, PointSource") 

475 self.edgeFluxFlagKey = schema.addField("deblend_edgeFluxFlag", type="Flag", 

476 doc="Source has flux on the edge of the image") 

477 self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32, 

478 doc="Flux measurement from scarlet") 

479 self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32, 

480 doc="Number of initial peaks in the blend. " 

481 "This includes peaks that may have been culled " 

482 "during deblending or failed to deblend") 

483 self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32, 

484 doc="Final logL, used to identify regressions in scarlet.") 

485 # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in 

486 # (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey)) 

487 # ) 

488 

489 @pipeBase.timeMethod 

490 def run(self, mExposure, mergedSources): 

491 """Get the psf from each exposure and then run deblend(). 

492 

493 Parameters 

494 ---------- 

495 mExposure : `MultibandExposure` 

496 The exposures should be co-added images of the same 

497 shape and region of the sky. 

498 mergedSources : `SourceCatalog` 

499 The merged `SourceCatalog` that contains parent footprints 

500 to (potentially) deblend. 

501 

502 Returns 

503 ------- 

504 fluxCatalogs: dict or None 

505 Keys are the names of the filters and the values are 

506 `lsst.afw.table.source.source.SourceCatalog`'s. 

507 These are the flux-conserved catalogs with heavy footprints with 

508 the image data weighted by the multiband templates. 

509 If `self.config.conserveFlux` is `False`, then this item will be 

510 None 

511 templateCatalogs: dict or None 

512 Keys are the names of the filters and the values are 

513 `lsst.afw.table.source.source.SourceCatalog`'s. 

514 These are catalogs with heavy footprints that are the templates 

515 created by the multiband templates. 

516 If `self.config.saveTemplates` is `False`, then this item will be 

517 None 

518 """ 

519 return self.deblend(mExposure, mergedSources) 

520 

521 @pipeBase.timeMethod 

522 def deblend(self, mExposure, sources): 

523 """Deblend a data cube of multiband images 

524 

525 Parameters 

526 ---------- 

527 mExposure : `MultibandExposure` 

528 The exposures should be co-added images of the same 

529 shape and region of the sky. 

530 sources : `SourceCatalog` 

531 The merged `SourceCatalog` that contains parent footprints 

532 to (potentially) deblend. 

533 

534 Returns 

535 ------- 

536 fluxCatalogs : dict or None 

537 Keys are the names of the filters and the values are 

538 `lsst.afw.table.source.source.SourceCatalog`'s. 

539 These are the flux-conserved catalogs with heavy footprints with 

540 the image data weighted by the multiband templates. 

541 If `self.config.conserveFlux` is `False`, then this item will be 

542 None 

543 templateCatalogs : dict or None 

544 Keys are the names of the filters and the values are 

545 `lsst.afw.table.source.source.SourceCatalog`'s. 

546 These are catalogs with heavy footprints that are the templates 

547 created by the multiband templates. 

548 If `self.config.saveTemplates` is `False`, then this item will be 

549 None 

550 """ 

551 import time 

552 

553 filters = mExposure.filters 

554 self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure))) 

555 

556 # Create the output catalogs 

557 templateCatalogs = {} 

558 # This must be returned but is not calculated right now, setting it to 

559 # None to be consistent with doc string 

560 fluxCatalogs = None 

561 for f in filters: 

562 _catalog = afwTable.SourceCatalog(sources.table.clone()) 

563 _catalog.extend(sources) 

564 templateCatalogs[f] = _catalog 

565 

566 n0 = len(sources) 

567 nparents = 0 

568 for pk, src in enumerate(sources): 

569 foot = src.getFootprint() 

570 bbox = foot.getBBox() 

571 logger.info("id: {0}".format(src["id"])) 

572 peaks = foot.getPeaks() 

573 

574 # Since we use the first peak for the parent object, we should 

575 # propagate its flags to the parent source. 

576 src.assign(peaks[0], self.peakSchemaMapper) 

577 

578 # Block of Skipping conditions 

579 if len(peaks) < 2 and not self.config.processSingles: 

580 for f in filters: 

581 templateCatalogs[f][pk].set(self.runtimeKey, 0) 

582 continue 

583 if self._isLargeFootprint(foot): 

584 src.set(self.tooBigKey, True) 

585 self._skipParent(src, mExposure.mask) 

586 self.log.trace('Parent %i: skipping large footprint', int(src.getId())) 

587 continue 

588 if self._isMasked(foot, mExposure): 

589 src.set(self.maskedKey, True) 

590 mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0) 

591 mask = afwImage.MaskX(mask, xy0=bbox.getMin()) 

592 self._skipParent(src, mask) 

593 self.log.trace('Parent %i: skipping masked footprint', int(src.getId())) 

594 continue 

595 if len(peaks) > self.config.maxNumberOfPeaks: 

596 src.set(self.tooManyPeaksKey, True) 

597 msg = 'Parent {0}: Too many peaks, using the first {1} peaks' 

598 self.log.trace(msg.format(int(src.getId()), self.config.maxNumberOfPeaks)) 

599 

600 nparents += 1 

601 self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks)) 

602 # Run the deblender 

603 blendError = None 

604 try: 

605 t0 = time.time() 

606 # Build the parameter lists with the same ordering 

607 blend, skipped = deblend(mExposure, foot, self.config) 

608 tf = time.time() 

609 runtime = (tf-t0)*1000 

610 src.set(self.deblendFailedKey, False) 

611 src.set(self.runtimeKey, runtime) 

612 converged = _checkBlendConvergence(blend, self.config.relativeError) 

613 src.set(self.blendConvergenceFailedFlagKey, converged) 

614 sources = [src for src in blend.sources] 

615 # Re-insert place holders for skipped sources 

616 # to propagate them in the catalog so 

617 # that the peaks stay consistent 

618 for k in skipped: 

619 sources.insert(k, None) 

620 # Catch all errors and filter out the ones that we know about 

621 except Exception as e: 

622 blendError = type(e).__name__ 

623 if isinstance(e, ScarletGradientError): 

624 src.set(self.iterKey, e.iterations) 

625 elif not isinstance(e, IncompleteDataError): 

626 blendError = "UnknownError" 

627 

628 if self.config.catchFailures: 

629 # Make it easy to find UnknownErrors in the log file 

630 self.log.warn("UnknownError") 

631 import traceback 

632 traceback.print_exc() 

633 else: 

634 raise 

635 

636 self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError)) 

637 src.set(self.deblendFailedKey, True) 

638 src.set(self.runtimeKey, 0) 

639 src.set(self.deblendErrorKey, blendError) 

640 bbox = foot.getBBox() 

641 src.set(self.modelCenter, Point2D(bbox.getMinX(), bbox.getMinY())) 

642 # We want to store the total number of initial peaks, 

643 # even if some of them fail 

644 src.set(self.nPeaksKey, len(foot.peaks)) 

645 continue 

646 

647 # Add the merged source as a parent in the catalog for each band 

648 templateParents = {} 

649 parentId = src.getId() 

650 for f in filters: 

651 templateParents[f] = templateCatalogs[f][pk] 

652 templateParents[f].set(self.nPeaksKey, len(foot.peaks)) 

653 templateParents[f].set(self.runtimeKey, runtime) 

654 templateParents[f].set(self.iterKey, len(blend.loss)) 

655 # TODO: When DM-26603 is merged observation has a "log_norm" 

656 # property that performs the following calculation, 

657 # so this code block can be removed 

658 observation = blend.observations[0] 

659 _weights = observation.weights 

660 _images = observation.images 

661 log_sigma = np.zeros(_weights.shape, dtype=_weights.dtype) 

662 cuts = _weights > 0 

663 log_sigma[cuts] = np.log(1/_weights[cuts]) 

664 log_norm = np.prod(_images.shape)/2 * np.log(2*np.pi)+np.sum(log_sigma)/2 

665 # end temporary code block 

666 logL = blend.loss[-1]-log_norm 

667 templateParents[f].set(self.scarletLogLKey, logL) 

668 

669 # Add each source to the catalogs in each band 

670 templateSpans = {f: afwGeom.SpanSet() for f in filters} 

671 nchild = 0 

672 for k, source in enumerate(sources): 

673 # Skip any sources with no flux or that scarlet skipped because 

674 # it could not initialize 

675 if k in skipped: 

676 if not self.config.propagateAllPeaks: 

677 # We don't care 

678 continue 

679 # We need to preserve the peak: make sure we have enough 

680 # info to create a minimal child src 

681 msg = "Peak at {0} failed deblending. Using minimal default info for child." 

682 self.log.trace(msg.format(src.getFootprint().peaks[k])) 

683 # copy the full footprint and strip out extra peaks 

684 foot = afwDet.Footprint(src.getFootprint()) 

685 peakList = foot.getPeaks() 

686 peakList.clear() 

687 peakList.append(src.peaks[k]) 

688 zeroMimg = afwImage.MaskedImageF(foot.getBBox()) 

689 heavy = afwDet.makeHeavyFootprint(foot, zeroMimg) 

690 models = afwDet.MultibandFootprint(mExposure.filters, [heavy]*len(mExposure.filters)) 

691 else: 

692 src.set(self.deblendSkippedKey, False) 

693 models = modelToHeavy(source, filters, xy0=bbox.getMin(), 

694 observation=blend.observations[0]) 

695 

696 flux = scarlet.measure.flux(source) 

697 for fidx, f in enumerate(filters): 

698 if len(models[f].getPeaks()) != 1: 

699 err = "Heavy footprint should have a single peak, got {0}" 

700 raise ValueError(err.format(len(models[f].peaks))) 

701 cat = templateCatalogs[f] 

702 child = self._addChild(parentId, cat, models[f], source, converged, 

703 xy0=bbox.getMin(), flux=flux[fidx]) 

704 if parentId == 0: 

705 child.setId(src.getId()) 

706 child.set(self.runtimeKey, runtime) 

707 else: 

708 templateSpans[f] = templateSpans[f].union(models[f].getSpans()) 

709 nchild += 1 

710 

711 # Child footprints may extend beyond the full extent of their 

712 # parent's which results in a failure of the replace-by-noise code 

713 # to reinstate these pixels to their original values. The 

714 # following updates the parent footprint in-place to ensure it 

715 # contains the full union of itself and all of its 

716 # children's footprints. 

717 for f in filters: 

718 templateParents[f].set(self.nChildKey, nchild) 

719 templateParents[f].getFootprint().setSpans(templateSpans[f]) 

720 

721 K = len(list(templateCatalogs.values())[0]) 

722 self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources' 

723 % (n0, nparents, K-n0, K)) 

724 return fluxCatalogs, templateCatalogs 

725 

726 def _isLargeFootprint(self, footprint): 

727 """Returns whether a Footprint is large 

728 

729 'Large' is defined by thresholds on the area, size and axis ratio. 

730 These may be disabled independently by configuring them to be 

731 non-positive. 

732 

733 This is principally intended to get rid of satellite streaks, which the 

734 deblender or other downstream processing can have trouble dealing with 

735 (e.g., multiple large HeavyFootprints can chew up memory). 

736 """ 

737 if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea: 

738 return True 

739 if self.config.maxFootprintSize > 0: 

740 bbox = footprint.getBBox() 

741 if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize: 

742 return True 

743 if self.config.minFootprintAxisRatio > 0: 

744 axes = afwEll.Axes(footprint.getShape()) 

745 if axes.getB() < self.config.minFootprintAxisRatio*axes.getA(): 

746 return True 

747 return False 

748 

749 def _isMasked(self, footprint, mExposure): 

750 """Returns whether the footprint violates the mask limits""" 

751 bbox = footprint.getBBox() 

752 mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0) 

753 size = float(footprint.getArea()) 

754 for maskName, limit in self.config.maskLimits.items(): 

755 maskVal = mExposure.mask.getPlaneBitMask(maskName) 

756 _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin()) 

757 unmaskedSpan = footprint.spans.intersectNot(_mask) # spanset of unmasked pixels 

758 if (size - unmaskedSpan.getArea())/size > limit: 

759 return True 

760 return False 

761 

762 def _skipParent(self, source, masks): 

763 """Indicate that the parent source is not being deblended 

764 

765 We set the appropriate flags and masks for each exposure. 

766 

767 Parameters 

768 ---------- 

769 source : `lsst.afw.table.source.source.SourceRecord` 

770 The source to flag as skipped 

771 masks : list of `lsst.afw.image.MaskX` 

772 The mask in each band to update with the non-detection 

773 """ 

774 fp = source.getFootprint() 

775 source.set(self.deblendSkippedKey, True) 

776 source.set(self.nChildKey, len(fp.getPeaks())) # It would have this many if we deblended them all 

777 if self.config.notDeblendedMask: 

778 for mask in masks: 

779 mask.addMaskPlane(self.config.notDeblendedMask) 

780 fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask)) 

781 

782 def _addChild(self, parentId, sources, heavy, scarlet_source, blend_converged, xy0, flux): 

783 """Add a child to a catalog 

784 

785 This creates a new child in the source catalog, 

786 assigning it a parent id, adding a footprint, 

787 and setting all appropriate flags based on the 

788 deblender result. 

789 """ 

790 assert len(heavy.getPeaks()) == 1 

791 src = sources.addNew() 

792 src.assign(heavy.getPeaks()[0], self.peakSchemaMapper) 

793 src.setParent(parentId) 

794 src.setFootprint(heavy) 

795 src.set(self.psfKey, False) 

796 src.set(self.runtimeKey, 0) 

797 src.set(self.blendConvergenceFailedFlagKey, not blend_converged) 

798 

799 try: 

800 cy, cx = scarlet_source.center 

801 except AttributeError: 

802 msg = "Did not recognize coordinates for source type of `{0}`, " 

803 msg += "could not write coordinates or center flux. " 

804 msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information." 

805 logger.warning(msg.format(type(scarlet_source))) 

806 return src 

807 xmin, ymin = xy0 

808 src.set(self.modelCenter, Point2D(cx+xmin, cy+ymin)) 

809 

810 # Store the flux at the center of the model and the total 

811 # scarlet flux measurement. 

812 morph = afwDet.multiband.heavyFootprintToImage(heavy).image.array 

813 cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0]) 

814 cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0]) 

815 src.set(self.modelCenterFlux, morph[cy, cx]) 

816 src.set(self.modelTypeKey, scarlet_source.__class__.__name__) 

817 src.set(self.edgeFluxFlagKey, scarlet_source.isEdge) 

818 # Include the source flux in the model space in the catalog. 

819 # This uses the narrower model PSF, which ensures that all sources 

820 # not located on an edge have all of their flux included in the 

821 # measurement. 

822 src.set(self.scarletFluxKey, flux) 

823 return src