Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1# This file is part of meas_extensions_scarlet. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22import numpy as np 

23import scarlet 

24from scarlet.psf import ImagePSF, GaussianPSF 

25from scarlet import Blend, Frame, Observation 

26from scarlet.initialization import initAllSources 

27 

28import lsst.log 

29import lsst.pex.config as pexConfig 

30from lsst.pex.exceptions import InvalidParameterError 

31import lsst.pipe.base as pipeBase 

32from lsst.geom import Point2I, Box2I, Point2D 

33import lsst.afw.geom.ellipses as afwEll 

34import lsst.afw.image.utils 

35import lsst.afw.image as afwImage 

36import lsst.afw.detection as afwDet 

37import lsst.afw.table as afwTable 

38 

39from .source import modelToHeavy 

40 

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

# Module-level logger, used by helpers that run outside the Task proper
# (e.g. warnings emitted from `_addChild`).
logger = lsst.log.Log.getLogger("meas.deblender.deblend")

44 

45 

class IncompleteDataError(Exception):
    """Raised when the PSF could not be computed due to incomplete data."""

50 

51 

class ScarletGradientError(Exception):
    """An error occurred during optimization

    This error occurs when the optimizer encounters
    a NaN value while calculating the gradient.

    Parameters
    ----------
    iterations : int
        Number of iterations completed before the NaN appeared.
    sources : list of int
        Indices of the sources whose models contain non-finite values.
    """
    def __init__(self, iterations, sources):
        self.iterations = iterations
        self.sources = sources
        # Bug fix: the message previously said "ScalarGradientError",
        # which did not match the name of this exception class.
        msg = ("ScarletGradientError in iteration {0}. "
               "NaN values introduced in sources {1}")
        self.message = msg.format(iterations, sources)

    def __str__(self):
        return self.message

67 

68 

69def _checkBlendConvergence(blend, f_rel): 

70 """Check whether or not a blend has converged 

71 """ 

72 deltaLoss = np.abs(blend.loss[-2] - blend.loss[-1]) 

73 convergence = f_rel * np.abs(blend.loss[-1]) 

74 return deltaLoss < convergence 

75 

76 

77def _getPsfFwhm(psf): 

78 """Calculate the FWHM of the `psf` 

79 """ 

80 return psf.computeShape().getDeterminantRadius() * 2.35 

81 

82 

def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF Kernel Image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.
    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : array
        The multiband PSF image.

    Raises
    ------
    IncompleteDataError
        If the PSF could not be evaluated in one of the bands.
    """
    # Normalize the coordinates to a Point2D when given as a tuple
    if position is not None and not isinstance(position, Point2D):
        position = Point2D(position[0], position[1])

    psfs = []
    for bidx, single in enumerate(self.singles):
        try:
            if position is None:
                psfs.append(single.getPsf().computeImage())
            else:
                psfs.append(single.getPsf().computeKernelImage(position))
        except InvalidParameterError:
            # This band failed to compute the PSF due to incomplete data
            # at that location. This is unlikely to be a problem for Rubin,
            # however the edges of some HSC COSMOS fields contain incomplete
            # data in some bands, so we track this error to distinguish it
            # from unknown errors.
            msg = "Failed to compute PSF at {} in band {}"
            raise IncompleteDataError(msg.format(position, self.filters[bidx]))

    # Project every single-band PSF onto the union of their bounding boxes
    # so that they can be stacked into one multiband image.
    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    projected = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    return afwImage.MultibandImage.fromImages(self.filters, projected)

134 

135 

def getFootprintMask(footprint, mExposure):
    """Mask pixels outside the footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend

    Returns
    -------
    footprintMask : array
        Boolean array with pixels not in the footprint set to one.
    """
    fpMask = afwImage.Mask(footprint.getBBox())
    # Paint the footprint span set into the mask, then invert it so that
    # True marks the pixels *outside* the footprint.
    footprint.spans.setMask(fpMask, 1)
    return ~fpMask.getArray().astype(bool)

157 

158 

def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        - Configuration of the deblending task

    Returns
    -------
    blend : `scarlet.Blend`
        The fitted blend containing the deblended sources.
    skipped : list of int
        Indices of the peaks that could not be initialized.

    Raises
    ------
    ScarletGradientError
        If the optimizer encountered a NaN value while
        calculating the gradient.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)
    # Zero the weight of pixels flagged with any of the bad-mask planes
    badPixels = mExposure.mask.getPlaneBitMask(config.badMask)
    badMask = mExposure.mask[:, bbox].array & badPixels
    weights[badMask > 0] = 0

    # Mask out the pixels outside the footprint
    outsideFootprint = getFootprintMask(footprint, mExposure)
    weights *= ~outsideFootprint

    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfs = ImagePSF(psfs)
    model_psf = GaussianPSF(sigma=(config.modelPsfSigma,)*len(mExposure.filters))

    frame = Frame(images.shape, psfs=model_psf, channels=mExposure.filters)
    observation = Observation(images, psfs=psfs, weights=weights, channels=mExposure.filters)
    observation.match(frame)

    # Bug fix: "compact" is handled below but was missing from this check,
    # making that branch unreachable.
    assert(config.sourceModel in ["single", "double", "compact", "point", "fit"])

    # Set the appropriate number of components
    if config.sourceModel == "single":
        maxComponents = 1
    elif config.sourceModel == "double":
        maxComponents = 2
    elif config.sourceModel == "compact":
        raise NotImplementedError("CompactSource initialization has not yet been ported"
                                  "to the stack version of scarlet")
    elif config.sourceModel == "point":
        maxComponents = 0
    elif config.sourceModel == "fit":
        # It is likely in the future that there will be some heuristic
        # used to determine what type of model to use for each source,
        # but that has not yet been implemented (see DM-22551)
        raise NotImplementedError("sourceModel 'fit' has not been implemented yet")

    # Convert the centers to pixel coordinates
    xmin = bbox.getMinX()
    ymin = bbox.getMinY()
    centers = [np.array([peak.getIy()-ymin, peak.getIx()-xmin], dtype=int) for peak in footprint.peaks]

    # Only deblend sources that can be initialized
    sources, skipped = initAllSources(
        frame=frame,
        centers=centers,
        observation=observation,
        symmetric=config.symmetric,
        monotonic=config.monotonic,
        thresh=config.morphThresh,
        maxComponents=maxComponents,
        edgeDistance=config.edgeDistance,
        shifting=False,
        downgrade=config.downgrade,
        fallback=config.fallback,
    )

    # Attach the peak to all of the initialized sources
    srcIndex = 0
    for k, center in enumerate(centers):
        if k not in skipped:
            # This is just to make sure that there isn't a coding bug
            assert np.all(sources[srcIndex].center == center)
            # Store the record for the peak with the appropriate source
            sources[srcIndex].detectedPeak = footprint.peaks[k]
            srcIndex += 1

    # Create the blend and attempt to optimize it
    blend = Blend(sources, observation)
    try:
        blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)
    except ArithmeticError:
        # This occurs when a gradient update produces a NaN value
        # This is usually due to a source initialized with a
        # negative SED or no flux, often because the peak
        # is a noise fluctuation in one band and not a real source.
        iterations = len(blend.loss)
        failedSources = []
        for k, src in enumerate(sources):
            if np.any(~np.isfinite(src.get_model())):
                failedSources.append(k)
        raise ScarletGradientError(iterations, failedSources)

    return blend, skipped

264 

265 

class ScarletDeblendConfig(pexConfig.Config):
    """MultibandDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    # doc fix: previous concatenation lacked a
                                    # space between "between" and "iterations"
                                    doc=("Change in the loss function between "
                                         "iterations to exit fitter"))

    # Blend Configuration options
    edgeDistance = pexConfig.Field(dtype=int, default=1,
                                   doc="All sources with flux within `edgeDistance` from the edge "
                                       "will be considered edge sources.")

    # Constraints
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    monotonic = pexConfig.Field(dtype=bool, default=True, doc="Make models monotonic")
    symmetric = pexConfig.Field(dtype=bool, default=False, doc="Make models symmetric")

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        # doc fix: "use use" typo and missing space at the concatenation
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to process isolated sources in the deblender")
    sourceModel = pexConfig.Field(
        dtype=str, default="single",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'compact': use a single component model, initialized with a point source morphology, "
             " for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components (not yet implemented)")
    )
    downgrade = pexConfig.Field(
        dtype=bool, default=False,
        doc="Whether or not to downgrade the number of components for sources in small bounding boxes"
    )

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT"],
        # doc fix: the previous doc was a copy-paste of `processSingles`
        doc="Mask planes that indicate bad pixels; pixels with any of these "
            "bits set are given zero weight in the deblender")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))

    # Failure modes
    fallback = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to fallback to a smaller number of components if a source does not initialize"
    )
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=True,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))

371 

372 

class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task has no return value; it only modifies the SourceCatalog in-place.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema

        The doc strings below are attached to the output catalog schema,
        so they are runtime data; only clearly-garbled concatenations
        (missing separator spaces) have been repaired.
        """
        self.runtimeKey = schema.addField('deblend_runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('deblend_iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")
        self.deblendErrorKey = schema.addField('deblend_error', type="String", size=25,
                                               doc='Name of error if the blend failed')
        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.peakCenter = afwTable.Point2IKey.addFields(schema, name="deblend_peak_center",
                                                        doc="Center used to apply constraints in scarlet",
                                                        unit="pixel")
        self.peakIdKey = schema.addField("deblend_peakId", type=np.int32,
                                         doc="ID of the peak in the parent footprint. "
                                             "This is not unique, but the combination of 'parent' "
                                             "and 'peakId' should be for all child sources. "
                                             "Top level blends with no parents have 'peakId=0'")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of deblended mode")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=20,
                                            doc="The type of model used, for example "
                                                "MultiExtendedSource, SingleExtendedSource, PointSource")
        self.edgeFluxFlagKey = schema.addField("deblend_edgeFluxFlag", type="Flag",
                                               doc="Source has flux on the edge of the image")
        self.nPeaksKey = schema.addField("deblend_nPeaks", type=np.int32,
                                         doc="Number of initial peaks in the blend. "
                                             "This includes peaks that may have been culled "
                                             "during deblending or failed to deblend")
        self.parentNPeaksKey = schema.addField("deblend_parentNPeaks", type=np.int32,
                                               doc="Same as deblend_n_peaks, but the number of peaks "
                                                   "in the parent footprint")
        self.scarletFluxKey = schema.addField("deblend_scarletFlux", type=np.float32,
                                              doc="Flux measurement from scarlet")
        self.scarletLogLKey = schema.addField("deblend_logL", type=np.float32,
                                              doc="Final logL, used to identify regressions in scarlet.")

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None
        """
        return self.deblend(mExposure, mergedSources)

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs
        templateCatalogs = {}
        # This must be returned but is not calculated right now, setting it to
        # None to be consistent with doc string
        fluxCatalogs = None
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of Skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                self._skipParent(src, mask)
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            if self.config.maxNumberOfPeaks > 0 and len(peaks) > self.config.maxNumberOfPeaks:
                src.set(self.tooManyPeaksKey, True)
                self._skipParent(src, mExposure.mask)
                msg = 'Parent {0}: Too many peaks, skipping blend'
                self.log.trace(msg.format(int(src.getId())))
                # Unlike meas_deblender, in scarlet we skip the entire blend
                # if the number of peaks exceeds max peaks, since neglecting
                # to model any peaks often results in catastrophic failure
                # of scarlet to generate models for the brighter sources.
                continue

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            blendError = None
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                converged = _checkBlendConvergence(blend, self.config.relativeError)
                # Bug fix: this flag records convergence *failure*, so it
                # must store `not converged` (matching `_addChild`, which
                # stores `not blend_converged`); previously `converged`
                # was stored directly, inverting the flag's meaning.
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                # Renamed from `sources` to avoid shadowing the catalog
                # passed into this method.
                blendSources = [s for s in blend.sources]
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    blendSources.insert(k, None)
            # Catch all errors and filter out the ones that we know about
            except Exception as e:
                blendError = type(e).__name__
                if isinstance(e, ScarletGradientError):
                    src.set(self.iterKey, e.iterations)
                elif not isinstance(e, IncompleteDataError):
                    blendError = "UnknownError"
                    self._skipParent(src, mExposure.mask)
                    if self.config.catchFailures:
                        # Make it easy to find UnknownErrors in the log file
                        self.log.warn("UnknownError")
                        import traceback
                        traceback.print_exc()
                    else:
                        raise

                self.log.warn("Unable to deblend source %d: %s" % (src.getId(), blendError))
                src.set(self.deblendFailedKey, True)
                src.set(self.deblendErrorKey, blendError)
                self._skipParent(src, mExposure.mask)
                continue

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.nPeaksKey, len(foot.peaks))
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))
                logL = blend.loss[-1]-blend.observations[0].log_norm
                templateParents[f].set(self.scarletLogLKey, logL)

            # Add each source to the catalogs in each band
            nchild = 0
            for k, source in enumerate(blendSources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    # No need to propagate anything
                    continue
                else:
                    src.set(self.deblendSkippedKey, False)
                    models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                          observation=blend.observations[0])

                flux = scarlet.measure.flux(source)
                for fidx, f in enumerate(filters):
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].peaks)))
                    cat = templateCatalogs[f]
                    child = self._addChild(parentId, cat, models[f], source, converged,
                                           xy0=bbox.getMin(), flux=flux[fidx])
                    if parentId == 0:
                        child.setId(src.getId())
                        child.set(self.runtimeKey, runtime)
                nchild += 1

            # Set the number of children for each parent
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return fluxCatalogs, templateCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.source.source.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))
        # The deblender didn't run on this source, so it has zero runtime
        source.set(self.runtimeKey, 0)
        # Set the center of the parent
        bbox = fp.getBBox()
        centerX = int(bbox.getMinX()+bbox.getWidth()/2)
        centerY = int(bbox.getMinY()+bbox.getHeight()/2)
        source.set(self.peakCenter, Point2I(centerX, centerY))
        # There are no deblended children, so nChild = 0
        source.set(self.nChildKey, 0)
        # But we also want to know how many peaks that we would have
        # deblended if the parent wasn't skipped.
        source.set(self.nPeaksKey, len(fp.peaks))
        # The blend was skipped, so it didn't take any iterations
        source.set(self.iterKey, 0)
        # Top level parents are not a detected peak, so they have no peakId
        source.set(self.peakIdKey, 0)
        # Top level parents also have no parentNPeaks
        source.set(self.parentNPeaksKey, 0)

    def _addChild(self, parentId, sources, heavy, scarletSource, blend_converged, xy0, flux):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parentId)
        src.setFootprint(heavy)
        # Set the psf key based on whether or not the source was
        # deblended using the PointSource model.
        # This key is not that useful anymore since we now keep track of
        # `modelType`, but we continue to propagate it in case code downstream
        # is expecting it.
        src.set(self.psfKey, scarletSource.__class__.__name__ == "PointSource")
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)

        # Set the position of the peak from the parent footprint
        # This will make it easier to match the same source across
        # deblenders and across observations, where the peak
        # position is unlikely to change unless enough time passes
        # for a source to move on the sky.
        peak = scarletSource.detectedPeak
        src.set(self.peakCenter, Point2I(peak["i_x"], peak["i_y"]))
        src.set(self.peakIdKey, peak["id"])

        # The children have a single peak
        src.set(self.nPeaksKey, 1)

        # Store the flux at the center of the model and the total
        # scarlet flux measurement.
        morph = afwDet.multiband.heavyFootprintToImage(heavy).image.array

        # Set the flux at the center of the model (for SNR)
        try:
            cy, cx = scarletSource.center
            cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
            cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
            src.set(self.modelCenterFlux, morph[cy, cx])
        except AttributeError:
            msg = "Did not recognize coordinates for source type of `{0}`, "
            msg += "could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarletSource)))

        src.set(self.modelTypeKey, scarletSource.__class__.__name__)
        src.set(self.edgeFluxFlagKey, scarletSource.isEdge)
        # Include the source flux in the model space in the catalog.
        # This uses the narrower model PSF, which ensures that all sources
        # not located on an edge have all of their flux included in the
        # measurement.
        src.set(self.scarletFluxKey, flux)
        return src