Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of meas_extensions_scarlet. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22from functools import partial 

23 

24import numpy as np 

25import scarlet 

26from scarlet.psf import PSF, gaussian 

27from scarlet import PointSource, ExtendedSource, MultiComponentSource, Blend, Frame, Observation 

28 

29import lsst.log 

30import lsst.pex.config as pexConfig 

31from lsst.pex.exceptions import InvalidParameterError 

32import lsst.pipe.base as pipeBase 

33from lsst.geom import Point2I, Box2I, Point2D 

34import lsst.afw.math as afwMath 

35import lsst.afw.geom as afwGeom 

36import lsst.afw.geom.ellipses as afwEll 

37import lsst.afw.image.utils 

38import lsst.afw.image as afwImage 

39import lsst.afw.detection as afwDet 

40import lsst.afw.table as afwTable 

41 

42from .source import initSource, modelToHeavy 

43 

44__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"] 

45 

46logger = lsst.log.Log.getLogger("meas.deblender.deblend") 

47 

48 

class IncompleteDataError(Exception):
    """Raised when the PSF could not be computed due to incomplete data."""

53 

54 

class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.

    Parameters
    ----------
    iterations : `int`
        Number of iterations completed before the NaN appeared.
    sources : `list` of `int`
        Indices of the sources whose models contain NaN values.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        # Fixed typo: message previously said "ScalarGradientError",
        # which did not match the class name.
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message

70 

71 

def _checkBlendConvergence(blend, f_rel):
    """Check whether or not a blend has converged

    Convergence is declared when the absolute change in loss between
    the last two iterations is below `f_rel` times the final loss.
    """
    lastLoss = blend.loss[-1]
    deltaLoss = np.abs(blend.loss[-2] - lastLoss)
    return deltaLoss < f_rel * np.abs(lastLoss)

78 

79 

def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`

    The determinant radius (an effective Gaussian sigma) is scaled by
    2.35, the approximate Gaussian sigma-to-FWHM conversion factor.
    """
    determinantRadius = psf.computeShape().getDeterminantRadius()
    return 2.35 * determinantRadius

84 

85 

def _estimateRMS(exposure, statsMask):
    """Estimate the standard dev. of an image

    Calculate the RMS of the `exposure`, combining the mean and
    standard deviation of the variance plane in quadrature. Pixels
    flagged in any of the `statsMask` planes are excluded.
    """
    maskedImage = exposure.getMaskedImage()
    ctrl = afwMath.StatisticsControl()
    ctrl.setAndMask(maskedImage.getMask().getPlaneBitMask(statsMask))
    stats = afwMath.makeStatistics(maskedImage.variance, maskedImage.mask,
                                   afwMath.STDEV | afwMath.MEAN, ctrl)
    meanVar = stats.getValue(afwMath.MEAN)
    stdevVar = stats.getValue(afwMath.STDEV)
    return np.sqrt(meanVar**2 + stdevVar**2)

97 

98 

def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF Kernel Image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.
    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    self : `lsst.afw.image.MultibandExposure`
        The exposures whose single-band PSFs are evaluated.
        (This is a free function called as ``_computePsfImage(mExposure, ...)``.)
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : `lsst.afw.image.MultibandImage`
        The multiband PSF image.

    Raises
    ------
    IncompleteDataError
        If the PSF could not be computed in one of the bands.
    """
    # Make the coordinates into a Point2D (if necessary)
    if position is not None and not isinstance(position, Point2D):
        position = Point2D(position[0], position[1])

    psfs = []
    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psfs.append(single.getPsf().computeImage())
            else:
                psfs.append(single.getPsf().computeImage(position))
        except InvalidParameterError as err:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            # Chain the original exception so the root cause is preserved
            # in tracebacks (previously the cause was implicit).
            raise IncompleteDataError(msg.format(position, self.filters[bidx])) from err

    # Project all single-band PSF images onto the union of their
    # bounding boxes so they can be stacked into one multiband image.
    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage

150 

151 

def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data (currently unused; kept
          for interface compatibility)

    Returns
    -------
    footprintMask : array
        Boolean array with pixels not in the footprint set to one.
    """
    bbox = footprint.getBBox()
    # Paint the footprint spans into a fresh mask, then invert:
    # True marks pixels OUTSIDE the footprint.
    spanMask = afwImage.Mask(bbox)
    footprint.spans.setMask(spanMask, 1)
    return ~spanMask.getArray().astype(bool)

173 

174 

def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        - Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend containing the deblended sources.
    skipped : `list` of `int`
        Indices of the peaks that could not be initialized.

    Raises
    ------
    ScarletGradientError
        If the optimizer encountered NaN values while fitting.
    ValueError
        If `config.sourceModel` is not a recognized model name.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure)
    weights *= ~mask

    # Observed PSF in each band, evaluated at the parent's centroid
    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)

    # Narrow Gaussian PSF for the model frame
    psfShape = (config.modelPsfSize, config.modelPsfSize)
    model_psf = PSF(partial(gaussian, sigma=config.modelPsfSigma), shape=(None,)+psfShape)

    frame = Frame(images.shape, psfs=model_psf, channels=mExposure.filters)
    observation = Observation(images, psfs=psfs, weights=weights, channels=mExposure.filters)
    observation.match(frame)

    # Set the appropriate number of components.
    # An explicit raise replaces the previous `assert`, which was
    # stripped under `python -O` and left `maxComponents` unbound for
    # invalid values.
    if config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")
    componentCount = {"single": 1, "double": 2, "point": 0}
    if config.sourceModel not in componentCount:
        raise ValueError("Unrecognized sourceModel: {}".format(config.sourceModel))
    maxComponents = componentCount[config.sourceModel]

    # Convert the centers to pixel coordinates (y, x relative to the bbox)
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks]

    # Only deblend sources that can be initialized
    sources = []
    skipped = []
    for k, center in enumerate(centers):
        source = initSource(
            frame=frame,
            center=center,
            observation=observation,
            symmetric=config.symmetric,
            monotonic=config.monotonic,
            thresh=config.morphThresh,
            maxComponents=maxComponents,
            edgeDistance=config.edgeDistance,
            shifting=False,
            downgrade=config.downgrade,
            fallback=config.fallback,
        )
        if source is not None:
            source.detectedPeak = footprint.peaks[k]
            sources.append(source)
        else:
            skipped.append(k)

    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = [k for k, src in enumerate(sources)
                         if np.any(~np.isfinite(src.get_model()))]
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped

272 

273 

class ScarletDeblendConfig(pexConfig.Config):
    """MultibandDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    # NOTE: a space was added between the concatenated string pieces below;
    # the doc previously rendered as "betweeniterations".
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    doc=("Change in the loss function between "
                                         "iterations to exit fitter"))

    # Blend Configuration options
    edgeDistance = pexConfig.Field(dtype=int, default=1,
                                   doc="All sources with flux within `edgeDistance` from the edge "
                                       "will be considered edge sources.")

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    monotonic = pexConfig.Field(dtype=bool, default=True, doc="Make models monotonic")
    symmetric = pexConfig.Field(dtype=bool, default=False, doc="Make models symmetric")

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=False,
        doc="Whether or not to process isolated sources in the deblender")
    sourceModel = pexConfig.Field(
        dtype=str, default="single",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components "
             "(not yet implemented)")
    )
    downgrade = pexConfig.Field(
        dtype=bool, default=False,
        doc="Whether or not to downgrade the number of components for sources in small bounding boxes"
    )

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT"],
        # The doc below was a copy-paste of the `processSingles` doc;
        # replaced with a description that matches the field.
        doc="Mask planes that mark bad pixels to exclude from deblending")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=False,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))
    propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
                                        doc=('Guarantee that all peaks produce a child source.'))

379 

380 

class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task has no return value; it only modifies the SourceCatalog in-place.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')
        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')
        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        # NOTE: spaces were added between the concatenated doc-string pieces
        # below; several previously rendered with words run together
        # (e.g. "beforeconfig.maxIter").
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.modelCenter = afwTable.Point2DKey.addFields(schema, name="deblend_peak_center",
                                                         doc="Center used to apply constraints in scarlet",
                                                         unit="pixel")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of deblended mode")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=20,
                                            doc="The type of model used, for example "
                                                "MultiComponentSource, ExtendedSource, PointSource")
        self.edgeFluxFlagKey = schema.addField("deblend_edgeFluxFlag", type="Flag",
                                               doc="Source has flux on the edge of the image")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None
        """
        return self.deblend(mExposure, mergedSources)

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs
        templateCatalogs = {}
        # This must be returned but is not calculated right now, setting it to
        # None to be consistent with doc string
        fluxCatalogs = None
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            logger.info("id: {0}".format(src["id"]))
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of Skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                self._skipParent(src, mask)
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            # Fixed: the config doc says "<= 0: unlimited", but the previous
            # check `len(peaks) > self.config.maxNumberOfPeaks` flagged every
            # parent when the default 0 was used. Only flag when the limit is
            # positive and actually exceeded.
            if 0 < self.config.maxNumberOfPeaks < len(peaks):
                src.set(self.tooManyPeaksKey, True)
                msg = 'Parent {0}: Too many peaks, using the first {1} peaks'
                self.log.trace(msg.format(int(src.getId()), self.config.maxNumberOfPeaks))

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                # Fixed inverted flag: the field records convergence FAILURE,
                # so it must be `not converged` (consistent with `_addChild`,
                # which sets `not blend_converged` for the children).
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                # Renamed from `sources`, which shadowed the catalog being
                # iterated by the enclosing loop.
                deblendedSources = [s for s in blend.sources]
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    deblendedSources.insert(k, None)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    src.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"

                if self.config.catchFailures:
                    # Make it easy to find UnknownErrors in the log file
                    self.log.warn("UnknownError")
                    import traceback
                    traceback.print_exc()
                else:
                    raise

                self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError))
                src.set(self.deblendFailedKey, True)
                src.set(self.runtimeKey, 0)
                src.set(self.deblendErrorKey, blendError)
                bbox = foot.getBBox()
                src.set(self.modelCenter, Point2D(bbox.getMinX(), bbox.getMinY()))
                # We want to store the total number of initial peaks,
                # even if some of them fail
                src.set(self.nPeaksKey, len(foot.peaks))
                continue

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.nPeaksKey, len(foot.peaks))
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))
                # TODO: When DM-26603 is merged observation has a "log_norm"
                # property that performs the following calculation,
                # so this code block can be removed
                observation = blend.observations[0]
                _weights = observation.weights
                _images = observation.images
                log_sigma = np.zeros(_weights.shape, dtype=_weights.dtype)
                cuts = _weights > 0
                log_sigma[cuts] = np.log(1/_weights[cuts])
                log_norm = np.prod(_images.shape)/2 * np.log(2*np.pi)+np.sum(log_sigma)/2
                # end temporary code block
                logL = blend.loss[-1]-log_norm
                templateParents[f].set(self.scarletLogLKey, logL)

            # Add each source to the catalogs in each band
            templateSpans = {f: afwGeom.SpanSet() for f in filters}
            nchild = 0
            for k, source in enumerate(deblendedSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    if not self.config.propagateAllPeaks:
                        # We don't care
                        continue
                    # We need to preserve the peak: make sure we have enough
                    # info to create a minimal child src
                    msg = "Peak at {0} failed deblending. Using minimal default info for child."
                    self.log.trace(msg.format(src.getFootprint().peaks[k]))
                    # copy the full footprint and strip out extra peaks
                    foot = afwDet.Footprint(src.getFootprint())
                    peakList = foot.getPeaks()
                    peakList.clear()
                    peakList.append(src.peaks[k])
                    zeroMimg = afwImage.MaskedImageF(foot.getBBox())
                    heavy = afwDet.makeHeavyFootprint(foot, zeroMimg)
                    models = afwDet.MultibandFootprint(mExposure.filters, [heavy]*len(mExposure.filters))
                else:
                    src.set(self.deblendSkippedKey, False)
                    models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                          observation=blend.observations[0])
                # TODO: We should eventually write the morphology and SED to
                # the catalog

                # NOTE(review): in the `propagateAllPeaks` branch `source` is
                # None here, so this call would fail — confirm the intended
                # behavior for propagated peaks.
                flux = scarlet.measure.flux(source)
                for fidx, f in enumerate(filters):
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].peaks)))
                    cat = templateCatalogs[f]
                    child = self._addChild(parentId, cat, models[f], source, converged,
                                           xy0=bbox.getMin(), flux=flux[fidx])
                    if parentId == 0:
                        child.setId(src.getId())
                        child.set(self.runtimeKey, runtime)
                    else:
                        templateSpans[f] = templateSpans[f].union(models[f].getSpans())
                nchild += 1

            # Child footprints may extend beyond the full extent of their
            # parent's which results in a failure of the replace-by-noise code
            # to reinstate these pixels to their original values. The
            # following updates the parent footprint in-place to ensure it
            # contains the full union of itself and all of its
            # children's footprints.
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)
                templateParents[f].getFootprint().setSpans(templateSpans[f])

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return fluxCatalogs, templateCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.source.source.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        source.set(self.nChildKey, len(fp.getPeaks()))  # It would have this many if we deblended them all
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

    def _addChild(self, parentId, sources, heavy, scarlet_source, blend_converged, xy0, flux):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parentId)
        src.setFootprint(heavy)
        src.set(self.psfKey, False)
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)
        # Extract the center and morphology based on the scarlet model type
        if isinstance(scarlet_source, ExtendedSource):
            cy, cx = scarlet_source.pixel_center
            morph = scarlet_source.morph
        elif isinstance(scarlet_source, MultiComponentSource):
            cy, cx = scarlet_source.components[0].pixel_center
            morph = scarlet_source.components[0].morph
        elif isinstance(scarlet_source, PointSource):
            cy, cx = scarlet_source.parameters[1]
            morph = scarlet_source.morph
        else:
            msg = "Did not recognize source type of `{0}`, could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarlet_source)))
            return src
        xmin, ymin = xy0
        src.set(self.modelCenter, Point2D(cx+xmin, cy+ymin))
        # Clip the center to the valid bounds of the morphology image
        cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
        cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
        src.set(self.modelCenterFlux, morph[cy, cx])
        src.set(self.modelTypeKey, scarlet_source.__class__.__name__)
        src.set(self.edgeFluxFlagKey, scarlet_source.isEdge)
        # Include the source flux in the model space in the catalog.
        # This uses the narrower model PSF, which ensures that all sources
        # not located on an edge have all of their flux included in the
        # measurement.
        src.set(self.scarletFluxKey, flux)
        return src