# This file is part of meas_extensions_scarlet.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

from functools import partial

import numpy as np
from scarlet.psf import PSF, gaussian
from scarlet import PointSource, ExtendedSource, MultiComponentSource

import lsst.log
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
from lsst.geom import Point2I, Box2I, Point2D
import lsst.afw.math as afwMath
import lsst.afw.geom as afwGeom
import lsst.afw.geom.ellipses as afwEll
import lsst.afw.image.utils
import lsst.afw.image as afwImage
import lsst.afw.detection as afwDet
import lsst.afw.table as afwTable

from .source import init_source, modelToHeavy
from .blend import LsstBlend, checkBlendConvergence
from .observation import LsstFrame, LsstObservation

__all__ = ["deblend", "ScarletDeblendConfig", "ScarletDeblendTask"]

logger = lsst.log.Log.getLogger("meas.deblender.deblend")

def _getPsfFwhm(psf):
    """Calculate the FWHM of the `psf`.
    """
    # 2.35 approximates the Gaussian sigma-to-FWHM factor 2*sqrt(2*ln(2))
    return psf.computeShape().getDeterminantRadius() * 2.35

def _estimateRMS(exposure, statsMask):
    """Estimate the standard deviation of the noise in an image

    Calculate the RMS of the `exposure` from its variance plane,
    ignoring pixels flagged by the mask planes in `statsMask`.
    """
    mi = exposure.getMaskedImage()
    statsCtrl = afwMath.StatisticsControl()
    statsCtrl.setAndMask(mi.getMask().getPlaneBitMask(statsMask))
    stats = afwMath.makeStatistics(mi.variance, mi.mask, afwMath.STDEV | afwMath.MEAN, statsCtrl)
    # Combine the mean and scatter of the variance plane into a single RMS estimate
    rms = np.sqrt(stats.getValue(afwMath.MEAN)**2 + stats.getValue(afwMath.STDEV)**2)
    return rms

def _computePsfImage(self, position=None):
    """Get a multiband PSF image

    The PSF kernel image is computed for each band
    and combined into a (filter, y, x) array.
    The result is not cached, so if the same PSF is expected
    to be used multiple times it is a good idea to store the
    result in another variable.

    Note: this is a temporary fix during the deblender sprint.
    In the future this function will replace the current method
    in `afw.MultibandExposure.computePsfImage` (DM-19789).

    Parameters
    ----------
    self : `MultibandExposure`
        The multiband exposure whose per-band PSFs are evaluated.
    position : `Point2D` or `tuple`
        Coordinates to evaluate the PSF. If `position` is `None`
        then `Psf.getAveragePosition()` is used.

    Returns
    -------
    psfImage : `MultibandImage`
        The multiband PSF image.
    """
    psfs = []
    # Make the coordinates into a Point2D (if necessary)
    if not isinstance(position, Point2D) and position is not None:
        position = Point2D(position[0], position[1])

    for single in self.singles:
        if position is None:
            psf = single.getPsf().computeImage()
            psfs.append(psf)
        else:
            psf = single.getPsf().computeImage(position)
            psfs.append(psf)
    # Project each single-band PSF image onto the bounding box that contains them all
    left = np.min([psf.getBBox().getMinX() for psf in psfs])
    bottom = np.min([psf.getBBox().getMinY() for psf in psfs])
    right = np.max([psf.getBBox().getMaxX() for psf in psfs])
    top = np.max([psf.getBBox().getMaxY() for psf in psfs])
    bbox = Box2I(Point2I(left, bottom), Point2I(right, top))
    psfs = [afwImage.utils.projectImage(psf, bbox) for psf in psfs]
    psfImage = afwImage.MultibandImage.fromImages(self.filters, psfs)
    return psfImage
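# Illustrative note (sketch only): within this module the helper above is
# called with the multiband exposure passed in place of `self`, e.g. in
# `deblend()` below:
#
#     psfs = _computePsfImage(mExposure, footprint.getCentroid()).array
#
# which is why it is written as if it were a method of
# `afw.MultibandExposure` (see DM-19789).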

def getFootprintMask(footprint, mExposure, config):
    """Mask pixels outside the footprint

    Parameters
    ----------
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data
    config : `ScarletDeblendConfig`
        - Configuration of the deblending task

    Returns
    -------
    footprintMask : array
        Boolean array with pixels outside the footprint set to `True`.
    """
    bbox = footprint.getBBox()
    fpMask = afwImage.Mask(bbox)
    footprint.spans.setMask(fpMask, 1)
    # Invert the mask so that True marks pixels outside the footprint
    fpMask = ~fpMask.getArray().astype(bool)
    return fpMask
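# Illustrative note (sketch only): the boolean array returned by
# `getFootprintMask` is used in `deblend()` below to zero the weights of
# pixels outside the parent footprint:
#
#     mask = getFootprintMask(footprint, mExposure, config)
#     weights *= ~mask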

def deblend(mExposure, footprint, config):
    """Deblend a parent footprint

    Parameters
    ----------
    mExposure : `lsst.image.MultibandExposure`
        - The multiband exposure containing the image,
          mask, and variance data
    footprint : `lsst.detection.Footprint`
        - The footprint of the parent to deblend
    config : `ScarletDeblendConfig`
        - Configuration of the deblending task

    Returns
    -------
    blend : `LsstBlend`
        The fitted blend containing the deblended sources.
    skipped : `list` of `int`
        Indices of peaks that could not be initialized.
    """
    # Extract coordinates from each MultiColorPeak
    bbox = footprint.getBBox()

    # Create the data array from the masked images
    images = mExposure.image[:, bbox].array

    # Use the inverse variance as the weights
    if config.useWeights:
        weights = 1/mExposure.variance[:, bbox].array
    else:
        weights = np.ones_like(images)

    # Mask out the pixels outside the footprint
    mask = getFootprintMask(footprint, mExposure, config)
    weights *= ~mask

    # Compute the PSF image in each band and a Gaussian PSF for the model frame
    psfs = _computePsfImage(mExposure, footprint.getCentroid()).array.astype(np.float32)
    psfShape = (config.modelPsfSize, config.modelPsfSize)
    model_psf = PSF(partial(gaussian, sigma=config.modelPsfSigma), shape=(None,)+psfShape)

    frame = LsstFrame(images.shape, psfs=model_psf, channels=mExposure.filters)
    observation = LsstObservation(images, psfs=psfs, weights=weights, channels=mExposure.filters)
    observation.match(frame)

    assert config.sourceModel in ["single", "double", "point", "fit"]

    # Only deblend sources that can be initialized
    sources = []
    skipped = []
    for k, center in enumerate(footprint.peaks):
        if config.sourceModel == "single":
            components = 1
        elif config.sourceModel == "double":
            components = 2
        elif config.sourceModel == "point":
            components = 0
        elif config.sourceModel == "fit":
            # It is likely in the future that there will be some heuristic
            # used to determine what type of model to use for each source,
            # but that has not yet been implemented (see DM-22551)
            raise NotImplementedError("sourceModel 'fit' has not been implemented yet")
        else:
            raise ValueError("Unrecognized sourceModel")

        source = init_source(frame=frame, peak=center, observation=observation, bbox=bbox,
                             symmetric=config.symmetric, monotonic=config.monotonic,
                             thresh=config.morphThresh, components=components)
        if source is not None:
            sources.append(source)
        else:
            skipped.append(k)

    blend = LsstBlend(sources, observation)
    blend.fit(max_iter=config.maxIter, e_rel=config.relativeError)

    return blend, skipped
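# Illustrative note (sketch only): this free function is driven from
# `ScarletDeblendTask.deblend` further down in this file, roughly as
#
#     blend, skipped = deblend(mExposure, foot, self.config)
#     converged = checkBlendConvergence(blend, self.config.relativeError)
#
# where `mExposure` is a `MultibandExposure` and `foot` is the parent
# footprint of a merged source.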

class ScarletDeblendConfig(pexConfig.Config):
    """ScarletDeblendConfig

    Configuration for the multiband deblender.
    The parameters are organized by the parameter types, which are
    - Stopping Criteria: Used to determine if the fit has converged
    - Position Fitting Criteria: Used to fit the positions of the peaks
    - Constraints: Used to apply constraints to the peaks and their components
    - Other: Parameters that don't fit into the above categories
    """
    # Stopping Criteria
    maxIter = pexConfig.Field(dtype=int, default=300,
                              doc=("Maximum number of iterations to deblend a single parent"))
    relativeError = pexConfig.Field(dtype=float, default=1e-4,
                                    doc=("Change in the loss function between "
                                         "iterations to exit fitter"))

    # Blend Configuration options
    recenterPeriod = pexConfig.Field(dtype=int, default=5,
                                     doc=("Number of iterations between recentering"))
    exactLipschitz = pexConfig.Field(dtype=bool, default=True,
                                     doc=("Calculate exact Lipschitz constant in every step "
                                          "(True) or only calculate the approximate "
                                          "Lipschitz constant with significant changes in A,S "
                                          "(False)"))

    # Constraints
    sparse = pexConfig.Field(dtype=bool, default=True, doc="Make models compact and sparse")
    morphThresh = pexConfig.Field(dtype=float, default=1,
                                  doc="Fraction of background RMS a pixel must have "
                                      "to be included in the initial morphology")
    monotonic = pexConfig.Field(dtype=bool, default=True, doc="Make models monotonic")
    symmetric = pexConfig.Field(dtype=bool, default=False, doc="Make models symmetric")
    symmetryThresh = pexConfig.Field(dtype=float, default=1.0,
                                     doc=("Strictness of symmetry, from "
                                          "0 (no symmetry enforced) to "
                                          "1 (perfect symmetry required). "
                                          "If 'S' is not in `constraints`, this argument is ignored"))

    # Other scarlet parameters
    useWeights = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to use inverse variance weighting. "
             "If `useWeights` is `False` then flat weights are used"))
    usePsfConvolution = pexConfig.Field(
        dtype=bool, default=True,
        doc=("Whether or not to convolve the morphology with the "
             "PSF in each band or use the same morphology in all bands"))
    modelPsfSize = pexConfig.Field(
        dtype=int, default=11,
        doc="Model PSF side length in pixels")
    modelPsfSigma = pexConfig.Field(
        dtype=float, default=0.8,
        doc="Define sigma for the model frame PSF")
    saveTemplates = pexConfig.Field(
        dtype=bool, default=True,
        doc="Whether or not to save the SEDs and templates")
    processSingles = pexConfig.Field(
        dtype=bool, default=False,
        doc="Whether or not to process isolated sources in the deblender")
    storeHistory = pexConfig.Field(dtype=bool, default=False,
                                   doc="Whether or not to store the history for each source")
    sourceModel = pexConfig.Field(
        dtype=str, default="single",
        doc=("How to determine which model to use for sources, from\n"
             "- 'single': use a single component for all sources\n"
             "- 'double': use a bulge disk model for all sources\n"
             "- 'point': use a point-source model for all sources\n"
             "- 'fit': use a PSF fitting model to determine the number of components (not yet implemented)")
    )

    # Mask-plane restrictions
    badMask = pexConfig.ListField(
        dtype=str, default=["BAD", "CR", "NO_DATA", "SAT", "SUSPECT"],
        doc="Mask planes that mark pixels as bad and not to be used by the deblender")
    statsMask = pexConfig.ListField(dtype=str, default=["SAT", "INTRP", "NO_DATA"],
                                    doc="Mask planes to ignore when performing statistics")
    maskLimits = pexConfig.DictField(
        keytype=str,
        itemtype=float,
        default={},
        doc=("Mask planes with the corresponding limit on the fraction of masked pixels. "
             "Sources violating this limit will not be deblended."),
    )

    # Size restrictions
    maxNumberOfPeaks = pexConfig.Field(
        dtype=int, default=0,
        doc=("Only deblend the brightest maxNumberOfPeaks peaks in the parent"
             " (<= 0: unlimited)"))
    maxFootprintArea = pexConfig.Field(
        dtype=int, default=1000000,
        doc=("Maximum area for footprints before they are ignored as large; "
             "non-positive means no threshold applied"))
    maxFootprintSize = pexConfig.Field(
        dtype=int, default=0,
        doc=("Maximum linear dimension for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))
    minFootprintAxisRatio = pexConfig.Field(
        dtype=float, default=0.0,
        doc=("Minimum axis ratio for footprints before they are ignored "
             "as large; non-positive means no threshold applied"))

    # Failure modes
    notDeblendedMask = pexConfig.Field(
        dtype=str, default="NOT_DEBLENDED", optional=True,
        doc="Mask name for footprints not deblended, or None")
    catchFailures = pexConfig.Field(
        dtype=bool, default=False,
        doc=("If True, catch exceptions thrown by the deblender, log them, "
             "and set a flag on the parent, instead of letting them propagate up"))
    propagateAllPeaks = pexConfig.Field(dtype=bool, default=False,
                                        doc=('Guarantee that all peaks produce a child source.'))
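# Illustrative sketch (assumed driver code, not part of this module): the
# fields above are ordinary `pex_config` fields, so they can be overridden
# on a config instance before constructing the task, e.g.
#
#     config = ScarletDeblendConfig()
#     config.maxIter = 500
#     config.sourceModel = "double"
#     config.useWeights = False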

class ScarletDeblendTask(pipeBase.Task):
    """ScarletDeblendTask

    Split blended sources into individual sources.

    This task has no return value; it only modifies the SourceCatalog in-place.
    """
    ConfigClass = ScarletDeblendConfig
    _DefaultName = "scarletDeblend"

    def __init__(self, schema, peakSchema=None, **kwargs):
        """Create the task, adding necessary fields to the given schema.

        Parameters
        ----------
        schema : `lsst.afw.table.schema.schema.Schema`
            Schema object for measurement fields; will be modified in-place.
        peakSchema : `lsst.afw.table.schema.schema.Schema`
            Schema of Footprint Peaks that will be passed to the deblender.
            Any fields beyond the PeakTable minimal schema will be transferred
            to the main source Schema. If None, no fields will be transferred
            from the Peaks.
        **kwargs
            Passed to Task.__init__.
        """
        pipeBase.Task.__init__(self, **kwargs)

        peakMinimalSchema = afwDet.PeakTable.makeMinimalSchema()
        if peakSchema is None:
            # In this case, the peakSchemaMapper will transfer nothing, but
            # we'll still have one to simplify downstream code
            self.peakSchemaMapper = afwTable.SchemaMapper(peakMinimalSchema, schema)
        else:
            self.peakSchemaMapper = afwTable.SchemaMapper(peakSchema, schema)
            for item in peakSchema:
                if item.key not in peakMinimalSchema:
                    self.peakSchemaMapper.addMapping(item.key, item.field)
                    # Because SchemaMapper makes a copy of the output schema
                    # you give its ctor, it isn't updating this Schema in
                    # place. That's probably a design flaw, but in the
                    # meantime, we'll keep that schema in sync with the
                    # peakSchemaMapper.getOutputSchema() manually, by adding
                    # the same fields to both.
                    schema.addField(item.field)
            assert schema == self.peakSchemaMapper.getOutputSchema(), "Logic bug mapping schemas"
        self._addSchemaKeys(schema)
        self.schema = schema

    def _addSchemaKeys(self, schema):
        """Add deblender specific keys to the schema
        """
        self.runtimeKey = schema.addField('runtime', type=np.float32, doc='runtime in ms')

        self.iterKey = schema.addField('iterations', type=np.int32, doc='iterations to converge')

        self.nChildKey = schema.addField('deblend_nChild', type=np.int32,
                                         doc='Number of children this object has (defaults to 0)')
        self.psfKey = schema.addField('deblend_deblendedAsPsf', type='Flag',
                                      doc='Deblender thought this source looked like a PSF')
        self.tooManyPeaksKey = schema.addField('deblend_tooManyPeaks', type='Flag',
                                               doc='Source had too many peaks; '
                                                   'only the brightest were included')
        self.tooBigKey = schema.addField('deblend_parentTooBig', type='Flag',
                                         doc='Parent footprint covered too many pixels')
        self.maskedKey = schema.addField('deblend_masked', type='Flag',
                                         doc='Parent footprint was predominantly masked')
        self.sedNotConvergedKey = schema.addField('deblend_sedConvergenceFailed', type='Flag',
                                                  doc='scarlet sed optimization did not converge before '
                                                      'config.maxIter')
        self.morphNotConvergedKey = schema.addField('deblend_morphConvergenceFailed', type='Flag',
                                                    doc='scarlet morph optimization did not converge before '
                                                        'config.maxIter')
        self.blendConvergenceFailedFlagKey = schema.addField('deblend_blendConvergenceFailedFlag',
                                                             type='Flag',
                                                             doc='at least one source in the blend '
                                                                 'failed to converge')
        self.edgePixelsKey = schema.addField('deblend_edgePixels', type='Flag',
                                             doc='Source had flux on the edge of the parent footprint')
        self.deblendFailedKey = schema.addField('deblend_failed', type='Flag',
                                                doc="Deblending failed on source")

        self.deblendSkippedKey = schema.addField('deblend_skipped', type='Flag',
                                                 doc="Deblender skipped this source")
        self.modelCenter = afwTable.Point2DKey.addFields(schema, name="deblend_peak_center",
                                                         doc="Center used to apply constraints in scarlet",
                                                         unit="pixel")
        self.modelCenterFlux = schema.addField('deblend_peak_instFlux', type=float, units='count',
                                               doc="The instFlux at the peak position of the deblended model")
        self.modelTypeKey = schema.addField("deblend_modelType", type="String", size=20,
                                            doc="The type of model used, for example "
                                                "MultiComponentSource, ExtendedSource, PointSource")
        # self.log.trace('Added keys to schema: %s', ", ".join(str(x) for x in
        #                (self.nChildKey, self.tooManyPeaksKey, self.tooBigKey))
        #                )

    @pipeBase.timeMethod
    def run(self, mExposure, mergedSources):
        """Get the psf from each exposure and then run deblend().

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        mergedSources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None.
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None.
        """
        return self.deblend(mExposure, mergedSources)

    @pipeBase.timeMethod
    def deblend(self, mExposure, sources):
        """Deblend a data cube of multiband images

        Parameters
        ----------
        mExposure : `MultibandExposure`
            The exposures should be co-added images of the same
            shape and region of the sky.
        sources : `SourceCatalog`
            The merged `SourceCatalog` that contains parent footprints
            to (potentially) deblend.

        Returns
        -------
        fluxCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are the flux-conserved catalogs with heavy footprints with
            the image data weighted by the multiband templates.
            If `self.config.conserveFlux` is `False`, then this item will be
            None.
        templateCatalogs : dict or None
            Keys are the names of the filters and the values are
            `lsst.afw.table.source.source.SourceCatalog`'s.
            These are catalogs with heavy footprints that are the templates
            created by the multiband templates.
            If `self.config.saveTemplates` is `False`, then this item will be
            None.
        """
        import time

        filters = mExposure.filters
        self.log.info("Deblending {0} sources in {1} exposure bands".format(len(sources), len(mExposure)))

        # Create the output catalogs
        templateCatalogs = {}
        # This must be returned but is not calculated right now, setting it to
        # None to be consistent with doc string
        fluxCatalogs = None
        for f in filters:
            _catalog = afwTable.SourceCatalog(sources.table.clone())
            _catalog.extend(sources)
            templateCatalogs[f] = _catalog

        n0 = len(sources)
        nparents = 0
        for pk, src in enumerate(sources):
            foot = src.getFootprint()
            bbox = foot.getBBox()
            logger.info("id: {0}".format(src["id"]))
            peaks = foot.getPeaks()

            # Since we use the first peak for the parent object, we should
            # propagate its flags to the parent source.
            src.assign(peaks[0], self.peakSchemaMapper)

            # Block of Skipping conditions
            if len(peaks) < 2 and not self.config.processSingles:
                for f in filters:
                    templateCatalogs[f][pk].set(self.runtimeKey, 0)
                continue
            if self._isLargeFootprint(foot):
                src.set(self.tooBigKey, True)
                self._skipParent(src, mExposure.mask)
                self.log.trace('Parent %i: skipping large footprint', int(src.getId()))
                continue
            if self._isMasked(foot, mExposure):
                src.set(self.maskedKey, True)
                mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
                mask = afwImage.MaskX(mask, xy0=bbox.getMin())
                self._skipParent(src, mask)
                self.log.trace('Parent %i: skipping masked footprint', int(src.getId()))
                continue
            # A non-positive maxNumberOfPeaks means there is no limit
            if 0 < self.config.maxNumberOfPeaks < len(peaks):
                src.set(self.tooManyPeaksKey, True)
                msg = 'Parent {0}: Too many peaks, using the first {1} peaks'
                self.log.trace(msg.format(int(src.getId()), self.config.maxNumberOfPeaks))

            nparents += 1
            self.log.trace('Parent %i: deblending %i peaks', int(src.getId()), len(peaks))
            # Run the deblender
            try:
                t0 = time.time()
                # Build the parameter lists with the same ordering
                blend, skipped = deblend(mExposure, foot, self.config)
                tf = time.time()
                runtime = (tf-t0)*1000
                src.set(self.deblendFailedKey, False)
                src.set(self.runtimeKey, runtime)
                converged = checkBlendConvergence(blend, self.config.relativeError)
                # The flag marks a convergence *failure*, so invert `converged`
                src.set(self.blendConvergenceFailedFlagKey, not converged)
                # Copy the list of deblended sources so placeholders can be inserted
                sources = list(blend.sources)
                # Re-insert place holders for skipped sources
                # to propagate them in the catalog so
                # that the peaks stay consistent
                for k in skipped:
                    sources.insert(k, None)
            except Exception as e:
                if self.config.catchFailures:
                    self.log.warn("Unable to deblend source %d: %s" % (src.getId(), e))
                    src.set(self.deblendFailedKey, True)
                    src.set(self.runtimeKey, 0)
                    import traceback
                    traceback.print_exc()
                    continue
                else:
                    raise

            # Add the merged source as a parent in the catalog for each band
            templateParents = {}
            parentId = src.getId()
            for f in filters:
                templateParents[f] = templateCatalogs[f][pk]
                templateParents[f].set(self.runtimeKey, runtime)
                templateParents[f].set(self.iterKey, len(blend.loss))

            # Add each source to the catalogs in each band
            templateSpans = {f: afwGeom.SpanSet() for f in filters}
            nchild = 0
            for k, source in enumerate(sources):
                # Skip any sources with no flux or that scarlet skipped because
                # it could not initialize
                if k in skipped:
                    if not self.config.propagateAllPeaks:
                        # We don't care
                        continue
                    # We need to preserve the peak: make sure we have enough
                    # info to create a minimal child src
                    msg = "Peak at {0} failed deblending. Using minimal default info for child."
                    self.log.trace(msg.format(src.getFootprint().peaks[k]))
                    # copy the full footprint and strip out extra peaks
                    foot = afwDet.Footprint(src.getFootprint())
                    peakList = foot.getPeaks()
                    peakList.clear()
                    peakList.append(src.getFootprint().peaks[k])
                    zeroMimg = afwImage.MaskedImageF(foot.getBBox())
                    heavy = afwDet.makeHeavyFootprint(foot, zeroMimg)
                    models = afwDet.MultibandFootprint(mExposure.filters, [heavy]*len(mExposure.filters))
                else:
                    src.set(self.deblendSkippedKey, False)
                    models = modelToHeavy(source, filters, xy0=bbox.getMin(),
                                          observation=blend.observations[0])
                # TODO: We should eventually write the morphology and SED to
                # the catalog
                # morph = source.morphToHeavy(xy0=bbox.getMin())
                # sed = source.sed / source.sed.sum()

                for f in filters:
                    if len(models[f].getPeaks()) != 1:
                        err = "Heavy footprint should have a single peak, got {0}"
                        raise ValueError(err.format(len(models[f].peaks)))
                    cat = templateCatalogs[f]
                    child = self._addChild(parentId, cat, models[f], source, converged,
                                           xy0=bbox.getMin())
                    if parentId == 0:
                        child.setId(src.getId())
                        child.set(self.runtimeKey, runtime)
                    else:
                        templateSpans[f] = templateSpans[f].union(models[f].getSpans())
                nchild += 1

            # Child footprints may extend beyond the full extent of their
            # parent's which results in a failure of the replace-by-noise code
            # to reinstate these pixels to their original values. The
            # following updates the parent footprint in-place to ensure it
            # contains the full union of itself and all of its
            # children's footprints.
            for f in filters:
                templateParents[f].set(self.nChildKey, nchild)
                templateParents[f].getFootprint().setSpans(templateSpans[f])

        K = len(list(templateCatalogs.values())[0])
        self.log.info('Deblended: of %i sources, %i were deblended, creating %i children, total %i sources'
                      % (n0, nparents, K-n0, K))
        return fluxCatalogs, templateCatalogs

    def _isLargeFootprint(self, footprint):
        """Returns whether a Footprint is large

        'Large' is defined by thresholds on the area, size and axis ratio.
        These may be disabled independently by configuring them to be
        non-positive.

        This is principally intended to get rid of satellite streaks, which the
        deblender or other downstream processing can have trouble dealing with
        (e.g., multiple large HeavyFootprints can chew up memory).
        """
        if self.config.maxFootprintArea > 0 and footprint.getArea() > self.config.maxFootprintArea:
            return True
        if self.config.maxFootprintSize > 0:
            bbox = footprint.getBBox()
            if max(bbox.getWidth(), bbox.getHeight()) > self.config.maxFootprintSize:
                return True
        if self.config.minFootprintAxisRatio > 0:
            axes = afwEll.Axes(footprint.getShape())
            if axes.getB() < self.config.minFootprintAxisRatio*axes.getA():
                return True
        return False

    def _isMasked(self, footprint, mExposure):
        """Returns whether the footprint violates the mask limits"""
        bbox = footprint.getBBox()
        mask = np.bitwise_or.reduce(mExposure.mask[:, bbox].array, axis=0)
        size = float(footprint.getArea())
        for maskName, limit in self.config.maskLimits.items():
            maskVal = mExposure.mask.getPlaneBitMask(maskName)
            _mask = afwImage.MaskX(mask & maskVal, xy0=bbox.getMin())
            unmaskedSpan = footprint.spans.intersectNot(_mask)  # spanset of unmasked pixels
            if (size - unmaskedSpan.getArea())/size > limit:
                return True
        return False

    def _skipParent(self, source, masks):
        """Indicate that the parent source is not being deblended

        We set the appropriate flags and masks for each exposure.

        Parameters
        ----------
        source : `lsst.afw.table.source.source.SourceRecord`
            The source to flag as skipped
        masks : list of `lsst.afw.image.MaskX`
            The mask in each band to update with the non-detection
        """
        fp = source.getFootprint()
        source.set(self.deblendSkippedKey, True)
        source.set(self.nChildKey, len(fp.getPeaks()))  # It would have this many if we deblended them all
        if self.config.notDeblendedMask:
            for mask in masks:
                mask.addMaskPlane(self.config.notDeblendedMask)
                fp.spans.setMask(mask, mask.getPlaneBitMask(self.config.notDeblendedMask))

    def _addChild(self, parentId, sources, heavy, scarlet_source, blend_converged, xy0):
        """Add a child to a catalog

        This creates a new child in the source catalog,
        assigning it a parent id, adding a footprint,
        and setting all appropriate flags based on the
        deblender result.
        """
        assert len(heavy.getPeaks()) == 1
        src = sources.addNew()
        src.assign(heavy.getPeaks()[0], self.peakSchemaMapper)
        src.setParent(parentId)
        src.setFootprint(heavy)
        src.set(self.psfKey, False)
        src.set(self.runtimeKey, 0)
        src.set(self.blendConvergenceFailedFlagKey, not blend_converged)
        if isinstance(scarlet_source, ExtendedSource):
            cy, cx = scarlet_source.pixel_center
            morph = scarlet_source.morph
        elif isinstance(scarlet_source, MultiComponentSource):
            cy, cx = scarlet_source.components[0].pixel_center
            morph = scarlet_source.components[0].morph
        elif isinstance(scarlet_source, PointSource):
            cy, cx = scarlet_source.parameters[1]
            morph = scarlet_source.morph
        else:
            msg = "Did not recognize source type of `{0}`, could not write coordinates or center flux. "
            msg += "Add `{0}` to meas_extensions_scarlet to properly persist this information."
            logger.warning(msg.format(type(scarlet_source)))
            return src
        xmin, ymin = xy0
        src.set(self.modelCenter, Point2D(cx+xmin, cy+ymin))
        # Clip the (rounded) center to the morphology array before sampling the peak flux
        cy = np.max([np.min([int(np.round(cy)), morph.shape[0]-1]), 0])
        cx = np.max([np.min([int(np.round(cx)), morph.shape[1]-1]), 0])
        src.set(self.modelCenterFlux, morph[cy, cx])
        src.set(self.modelTypeKey, scarlet_source.__class__.__name__)
        return src
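# ---------------------------------------------------------------------------
# Illustrative usage sketch (comments only, not executed). It assumes a
# `MultibandExposure` named `mExposure` and a merged `SourceCatalog` named
# `mergedSources` produced by upstream detection and merging; only calls
# defined in this file are shown:
#
#     schema = afwTable.SourceTable.makeMinimalSchema()
#     deblendTask = ScarletDeblendTask(schema=schema)
#     fluxCatalogs, templateCatalogs = deblendTask.run(mExposure, mergedSources)
#
# `fluxCatalogs` is currently always `None`; `templateCatalogs` maps each
# filter name to a `SourceCatalog` of parents and their deblended children.
# ---------------------------------------------------------------------------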