Coverage for python/lsst/meas/extensions/scarlet/io.py: 27%

240 statements  


from __future__ import annotations

from dataclasses import dataclass
import json
from typing import Any
import numpy as np
from scarlet.bbox import Box, overlapped_slices
from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter
from scarlet.lite.measure import weight_sources

from lsst.geom import Box2I, Extent2I, Point2I, Point2D
from lsst.afw.detection.multiband import heavyFootprintToImage

from .source import liteModelToHeavy

__all__ = [
    "ScarletComponentData",
    "ScarletFactorizedComponentData",
    "ScarletSourceData",
    "ScarletBlendData",
    "ScarletModelData",
    "updateBlendRecords",
    "boundedDataToBox",
    "ComponentCube",
    "dataToScarlet",
    "scarletLiteToData",
    "scarletToData",
]

@dataclass
class ScarletComponentData:
    """Data for a component expressed as a 3D data cube

    For now this is used for scarlet main source models because
    their structure is too complex to persist in the same
    way that scarlet lite components can be persisted.

    Note that both `xy0` and `extent` use the lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the component's bounding box.
    extent : `tuple` of `int`
        The ``(width, height)`` of the component array.
    center : `tuple` of `float`
        The center of the component.
    model : `numpy.ndarray`
        The model for the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    model: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "center": self.center,
            "model": tuple(self.model.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletComponentData":
        """Reconstruct `ScarletComponentData` from a JSON compatible dict

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # `extent` is (width, height), so the 2D model shape is (height, width)
        shape = dataShallowCopy["extent"][::-1]
        # Infer the number of bands from the size of the flattened model
        numBands = len(data["model"]) // (shape[0] * shape[1])
        dataShallowCopy["model"] = np.array(data["model"]).reshape((numBands,) + shape).astype(np.float32)
        return cls(**dataShallowCopy)
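# The sketch below illustrates the intended round trip through `asDict` and
# `fromDict` for a single-band component; the coordinates and array values are
# made up purely for illustration.
#
#     data = ScarletComponentData(
#         xy0=(25, 40),
#         extent=(3, 2),
#         center=(26.5, 41.0),
#         model=np.arange(6, dtype=np.float32).reshape(1, 2, 3),
#     )
#     encoded = data.asDict()
#     restored = ScarletComponentData.fromDict(json.loads(json.dumps(encoded)))
#     assert restored.model.shape == (1, 2, 3)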

@dataclass
class ScarletFactorizedComponentData:
    """Data for a factorized component

    Note that both `xy0` and `extent` use the lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the component's bounding box.
    extent : `tuple` of `int`
        The ``(width, height)`` of the component array.
    center : `tuple` of `float`
        The center of the component.
    sed : `numpy.ndarray`
        The SED of the component.
    morph : `numpy.ndarray`
        The 2D morphology of the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    sed: np.ndarray
    morph: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "center": self.center,
            "sed": tuple(self.sed.astype(float)),
            "morph": tuple(self.morph.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData":
        """Reconstruct `ScarletFactorizedComponentData` from a JSON
        compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletFactorizedComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # `extent` is (width, height), so the 2D morphology shape is (height, width)
        shape = dataShallowCopy["extent"][::-1]
        dataShallowCopy["sed"] = np.array(data["sed"]).astype(np.float32)
        dataShallowCopy["morph"] = np.array(data["morph"]).reshape(shape).astype(np.float32)
        return cls(**dataShallowCopy)

@dataclass
class ScarletSourceData:
    """Data for a scarlet source

    Attributes
    ----------
    components : `list` of `ScarletComponentData`
        The components contained in the source that are not factorized.
    factorizedComponents : `list` of `ScarletFactorizedComponentData`
        The components contained in the source that are factorized.
    peakId : `int`
        The peak ID of the source in its parent's footprint peak catalog.
    """
    components: list[ScarletComponentData]
    factorizedComponents: list[ScarletFactorizedComponentData]
    peakId: int

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        result = {
            "components": [],
            "factorized": [],
            "peakId": self.peakId,
        }
        for component in self.components:
            reduced = component.asDict()
            result["components"].append(reduced)

        for component in self.factorizedComponents:
            reduced = component.asDict()
            result["factorized"].append(reduced)
        return result

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletSourceData":
        """Reconstruct `ScarletSourceData` from a JSON compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletSourceData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        del dataShallowCopy["factorized"]
        components = []
        for component in data["components"]:
            component = ScarletComponentData.fromDict(component)
            components.append(component)
        dataShallowCopy["components"] = components

        factorized = []
        for component in data["factorized"]:
            component = ScarletFactorizedComponentData.fromDict(component)
            factorized.append(component)
        dataShallowCopy["factorizedComponents"] = factorized
        dataShallowCopy["peakId"] = int(data["peakId"])
        return cls(**dataShallowCopy)
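# Note that the serialized form does not mirror the attribute names exactly:
# `factorizedComponents` is stored under the shorter "factorized" key, which
# `fromDict` maps back onto the attribute. A minimal sketch, where `fcd` is
# assumed to be an existing `ScarletFactorizedComponentData`:
#
#     sourceData = ScarletSourceData(components=[], factorizedComponents=[fcd], peakId=42)
#     encoded = sourceData.asDict()
#     assert set(encoded) == {"components", "factorized", "peakId"}
#     restored = ScarletSourceData.fromDict(encoded)
#     assert len(restored.factorizedComponents) == 1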

@dataclass
class ScarletBlendData:
    """Data for an entire blend.

    Note that `xy0`, `extent`, and `psfCenter` use the lsst ``(x, y)``
    convention, not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the blend's bounding box.
    extent : `tuple` of `int`
        The ``(width, height)`` of the blend array.
    sources : `dict` of [`int`, `ScarletSourceData`]
        Data for the sources contained in the blend,
        keyed by the source record ID.
    psfCenter : `tuple` of `float`
        The location used for the center of the PSF for
        the blend.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    sources: dict[int, ScarletSourceData]
    psfCenter: tuple[float, float]

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        result: dict[str, Any] = {"xy0": self.xy0, "extent": self.extent, "psfCenter": self.psfCenter}
        result["sources"] = {id: source.asDict() for id, source in self.sources.items()}
        return result

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletBlendData":
        """Reconstruct `ScarletBlendData` from a JSON compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletBlendData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        dataShallowCopy["psfCenter"] = tuple(data["psfCenter"])
        dataShallowCopy["sources"] = {int(id): ScarletSourceData.fromDict(source)
                                      for id, source in data["sources"].items()}
        return cls(**dataShallowCopy)
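# For reference, the JSON form of a blend (after `asDict` plus
# `json.dumps`/`json.loads`) nests the source and component dicts as sketched
# below; the IDs and values are made up for illustration only. `fromDict`
# converts the stringified source keys back to `int`.
#
#     {
#         "xy0": [1002, 2341],
#         "extent": [58, 47],
#         "psfCenter": [1030.0, 2360.0],
#         "sources": {
#             "123": {
#                 "components": [],
#                 "factorized": [{"xy0": [...], "extent": [...], "center": [...],
#                                 "sed": [...], "morph": [...]}],
#                 "peakId": 4
#             }
#         }
#     }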

class ScarletModelData:
    """A container that propagates scarlet models for an entire `SourceCatalog`
    """
    def __init__(self, filters, psf, blends=None):
        """Initialize an instance

        Parameters
        ----------
        filters : `list` of `str`
            The names of the filters.
            The order of the filters must be the same as the order of
            the multiband model arrays and SEDs.
        psf : `numpy.ndarray`
            The 2D array of the PSF in scarlet model space.
            This is typically a narrow Gaussian integrated over the
            pixels in the exposure.
        blends : `dict` of [`int`, `ScarletBlendData`]
            Initial `dict` that maps parent IDs from the source catalog
            to the scarlet model data for the parent blend.
        """
        self.filters = filters
        self.psf = psf
        if blends is None:
            blends = {}
        self.blends = blends

    def json(self) -> str:
        """Serialize the data model to a JSON formatted string

        Returns
        -------
        result : `str`
            The result of the object converted into a JSON format
        """
        result = {
            "filters": self.filters,
            "psfShape": self.psf.shape,
            # Convert to Python floats so that the array is JSON serializable
            "psf": list(self.psf.flatten().astype(float)),
            "blends": {id: blend.asDict() for id, blend in self.blends.items()}
        }
        return json.dumps(result)

    @classmethod
    def parse_obj(cls, data: dict) -> "ScarletModelData":
        """Construct a ScarletModelData from a python decoded JSON object.

        Parameters
        ----------
        data : `dict`
            The result of json.load(s) on a JSON persisted ScarletModelData

        Returns
        -------
        result : `ScarletModelData`
            The `ScarletModelData` that was loaded from the input object
        """
        dataShallowCopy = dict(data)
        modelPsf = np.array(
            dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32)
        dataShallowCopy["psf"] = modelPsf
        dataShallowCopy["blends"] = {
            int(id): ScarletBlendData.fromDict(blend)
            for id, blend in data["blends"].items()
        }
        return cls(**dataShallowCopy)

    def updateCatalogFootprints(self, catalog, band, psfModel, redistributeImage=None,
                                removeScarletData=True, updateFluxColumns=True):
        """Use the scarlet models to set HeavyFootprints for modeled sources

        Parameters
        ----------
        catalog : `lsst.afw.table.SourceCatalog`
            The catalog missing heavy footprints for deblended sources.
        band : `str`
            The name of the band that the catalog data describes.
        psfModel : `lsst.afw.detection.Psf`
            The observed PSF model for the catalog.
        redistributeImage : `lsst.afw.image.Image`
            The image that is the source for flux re-distribution.
            If `redistributeImage` is `None` then flux re-distribution is
            not performed.
        removeScarletData : `bool`
            Whether or not to remove `ScarletBlendData` for each blend
            in order to save memory.
        updateFluxColumns : `bool`
            Whether or not to update the `deblend_*` columns in the catalog.
            This should only be true when the input catalog schema already
            contains those columns.
        """
        # Iterate over the blends, since flux re-distribution must be done on
        # all of the children with the same parent
        parents = catalog[catalog["parent"] == 0]
        # Get the index of the model for the given band
        bandIndex = self.filters.index(band)

        for parentRecord in parents:
            parentId = parentRecord.getId()

            try:
                blendModel = self.blends[parentId]
            except KeyError:
                # The parent was skipped in the deblender, so there are
                # no models for its sources.
                continue
            updateBlendRecords(
                blendData=blendModel,
                catalog=catalog,
                modelPsf=self.psf,
                observedPsf=psfModel,
                redistributeImage=redistributeImage,
                bandIndex=bandIndex,
                parentFootprint=parentRecord.getFootprint(),
                updateFluxColumns=updateFluxColumns,
            )

            # Save memory by removing the data for the blend
            if removeScarletData:
                del self.blends[parentId]
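# A minimal persistence round trip through `json` and `parse_obj`, assuming
# `blendData` is an existing `ScarletBlendData` keyed by its parent record ID;
# the filter names and model PSF below are illustrative only.
#
#     modelData = ScarletModelData(
#         filters=["g", "r", "i"],
#         psf=np.zeros((41, 41), dtype=np.float32),
#         blends={parentId: blendData},
#     )
#     payload = modelData.json()
#     restored = ScarletModelData.parse_obj(json.loads(payload))
#     assert restored.filters == ["g", "r", "i"]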

def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, redistributeImage, bandIndex,
                       parentFootprint, updateFluxColumns):
    """Create footprints and update band-dependent columns in the catalog

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    catalog : `lsst.afw.table.SourceCatalog`
        The catalog that is being updated.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF.
    observedPsf : `lsst.afw.detection.Psf`
        The observed PSF model for the catalog.
    redistributeImage : `lsst.afw.image.Image`
        The image that is the source for flux re-distribution.
        If `redistributeImage` is `None` then flux re-distribution is
        not performed.
    bandIndex : `int`
        The index of the band to extract.
    parentFootprint : `lsst.afw.detection.Footprint`
        The footprint of the parent, used for masking out the model
        when re-distributing flux.
    updateFluxColumns : `bool`
        Whether or not to update the `deblend_*` columns in the catalog.
        This should only be true when the input catalog schema already
        contains those columns.
    """
    # We import here to avoid a circular dependency
    from .scarletDeblendTask import setDeblenderMetrics, getFootprintMask

    useFlux = redistributeImage is not None
    xy0 = Point2I(*blendData.xy0)

    blend = dataToScarlet(
        blendData=blendData,
        nBands=1,
        bandIndex=bandIndex,
        dtype=np.float32,
    )

    position = Point2D(*blendData.psfCenter)
    psfs = observedPsf.computeKernelImage(position).array[None, :, :]
    modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    blend.observation = DummyObservation(
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        bbox=modelBox,
        dtype=np.float32,
    )

    # Set the metrics for the blend.
    # TODO: remove this once DM-34558 runs all deblender metrics
    # in a separate task.
    if updateFluxColumns:
        setDeblenderMetrics(blend)

    # Update the source models if the scarlet models are used as
    # templates to re-distribute flux from an observation
    if useFlux:
        # Extract the image array to re-distribute its flux
        extent = Extent2I(*blendData.extent)
        bbox = Box2I(xy0, extent)
        blend.observation.images = redistributeImage[bbox].array[None, :, :]
        blend.observation.weights = ~getFootprintMask(parentFootprint, None)[None, :, :]
        # Re-distribute the flux for each source in-place
        weight_sources(blend)

    # Update the HeavyFootprints for deblended sources
    # and update the band-dependent catalog columns.
    for source in blend.sources:
        sourceRecord = catalog.find(source.recordId)
        parent = catalog.find(sourceRecord["parent"])
        peaks = parent.getFootprint().peaks
        peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
        source.detectedPeak = peaks[peakIdx]
        # Set the Footprint
        heavy = liteModelToHeavy(
            source=source,
            blend=blend,
            xy0=xy0,
            useFlux=useFlux,
        )
        sourceRecord.setFootprint(heavy)

        if updateFluxColumns:
            # Set the flux of the scarlet model
            # TODO: this field should probably be deprecated,
            # since DM-33710 gives users access to the scarlet models.
            model = source.get_model()[0]
            sourceRecord.set("deblend_scarletFlux", np.sum(model))

            # Set the flux at the center of the model
            peak = heavy.peaks[0]
            img = heavyFootprintToImage(heavy, fill=0.0)
            sourceRecord.set("deblend_peak_instFlux", img.image[Point2I(peak["i_x"], peak["i_y"])])

            # Set the metrics columns.
            # TODO: remove this once DM-34558 runs all deblender metrics
            # in a separate task.
            sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0])
            sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0])
            sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0])
            sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0])

def boundedDataToBox(nBands, boundedData):
    """Convert bounds from the data storage format to a `scarlet.bbox.Box`

    Parameters
    ----------
    nBands : `int`
        The number of bands in the model.
    boundedData :
        The scarlet data object containing `xy0` and `extent`
        attributes giving bounding box information in the lsst format
        `(x, y)`.

    Returns
    -------
    bbox : `scarlet.bbox.Box`
        The scarlet bounding box generated by the bounds.
    """
    xy0 = (0, ) + boundedData.xy0[::-1]
    extent = (nBands, ) + boundedData.extent[::-1]
    bbox = Box(shape=extent, origin=xy0)
    return bbox
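# A worked example of the coordinate flip, using made-up bounds: a component
# with xy0=(20, 30) and extent=(15, 10) in the lsst (x, y) convention becomes,
# for nBands=3, a scarlet box in (bands, y, x) order:
#
#     bbox = boundedDataToBox(3, componentData)
#     # bbox.shape  == (3, 10, 15)
#     # bbox.origin == (0, 30, 20)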

class ComponentCube:
    """Dummy component for scarlet main sources.

    This is duck-typed to a `scarlet.lite.LiteComponent` in order to
    generate a model from the component.

    If scarlet lite ever implements a component as a data cube,
    this class can be removed.
    """
    def __init__(self, model, center, bbox, model_bbox=None):
        """Initialization

        Parameters
        ----------
        model : `numpy.ndarray`
            The 3D (bands, y, x) model of the component.
        center : `tuple` of `int`
            The `(y, x)` center of the component.
        bbox : `scarlet.bbox.Box`
            The bounding box of the component.
        model_bbox : `scarlet.bbox.Box`, optional
            The bounding box of the entire blend.
            This parameter is currently unused.
        """
        self.model = model
        self.center = center
        self.bbox = bbox

    def get_model(self, bbox=None):
        """Generate the model for the source

        Parameters
        ----------
        bbox : `scarlet.bbox.Box`
            The bounding box to insert the model into.
            If `bbox` is `None` then the model is returned in its own
            bounding box.

        Returns
        -------
        model : `numpy.ndarray`
            The model as a 3D `(band, y, x)` array.
        """
        model = self.model
        if bbox is not None:
            slices = overlapped_slices(bbox, self.bbox)
            _model = np.zeros(bbox.shape, model.dtype)
            _model[slices[0]] = model[slices[1]]
            model = _model
        return model


class DummyParameter(LiteParameter):
    """A parameter placeholder

    Models in scarlet have parameters, not arrays,
    for their SEDs and morphologies, so this wrapper for
    the SED and morphology arrays implements the required
    methods and attributes.
    """
    def __init__(self, x):
        self.x = x
        self.grad = None

    def update(self, it, input_grad, *args):
        pass

    def grow(self, new_shape, dist):
        pass

    def shrink(self, dist):
        pass


class DummyObservation(LiteObservation):
    """An observation that does not have any image data

    In order to reproduce a model in an observed seeing we make use of the
    scarlet `LiteObservation` class, but since we are not fitting the model
    to data we can use empty arrays for the image, variance, and weight data,
    and zero for the `noise_rms`.

    Parameters
    ----------
    psfs : `numpy.ndarray`
        The array of PSF images in each band.
    model_psf : `numpy.ndarray`
        The image of the model PSF.
    bbox : `scarlet.bbox.Box`
        The bounding box of the full model.
    dtype : `numpy.dtype`
        The data type of the model that is generated.
    """
    def __init__(self, psfs, model_psf, bbox, dtype):
        dummyImage = np.zeros([], dtype=dtype)

        super().__init__(
            images=dummyImage,
            variance=dummyImage,
            weights=dummyImage,
            psfs=psfs,
            model_psf=model_psf,
            convolution_mode="real",
            noise_rms=0,
            bbox=bbox,
        )

def dataToScarlet(blendData, nBands=None, bandIndex=None, dtype=np.float32):
    """Convert the storage data model into a scarlet lite blend

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    nBands : `int`
        The number of bands in the image.
        If `bandIndex` is not `None` then this parameter is ignored and
        the number of bands is set to 1.
    bandIndex : `int`
        Index of model to extract. If `bandIndex` is `None` then the
        full model is extracted.
    dtype : `numpy.dtype`
        The data type of the model that is generated.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        A scarlet blend model extracted from persisted data.
    """
    if bandIndex is not None:
        nBands = 1
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    sources = []
    for sourceId, sourceData in blendData.sources.items():
        components = []
        for componentData in sourceData.components:
            bbox = boundedDataToBox(nBands, componentData)
            if bandIndex is None:
                model = componentData.model
            else:
                model = componentData.model[bandIndex][None, :, :]
            component = ComponentCube(
                model=model,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
            )
            components.append(component)
        for componentData in sourceData.factorizedComponents:
            bbox = boundedDataToBox(nBands, componentData)
            # Add dummy values for properties only needed for
            # model fitting.
            if bandIndex is None:
                sed = componentData.sed
            else:
                sed = componentData.sed[bandIndex:bandIndex + 1]
            sed = DummyParameter(sed)
            morph = DummyParameter(componentData.morph)
            # Note: since we aren't fitting a model, we don't need to
            # set the RMS of the background.
            # We set it to NaN just to be safe.
            component = LiteFactorizedComponent(
                sed=sed,
                morph=morph,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                model_bbox=modelBox,
                bg_rms=np.nan,
            )
            components.append(component)

        source = LiteSource(components=components, dtype=dtype)
        source.recordId = sourceId
        source.peakId = sourceData.peakId
        sources.append(source)

    return LiteBlend(sources=sources, observation=None)
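# A sketch of the band-selection behavior, assuming `blendData` holds a
# three-band ("g", "r", "i") blend: passing `bandIndex` extracts a single-band
# blend regardless of `nBands`, while `bandIndex=None` keeps the full model.
#
#     singleBand = dataToScarlet(blendData, nBands=1, bandIndex=1)   # "r" only
#     multiBand = dataToScarlet(blendData, nBands=3)                 # all bands
#
# Note that the returned blend has `observation=None`; callers such as
# `updateBlendRecords` attach a `DummyObservation` before evaluating models.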

def scarletLiteToData(blend, psfCenter, xy0):
    """Convert a scarlet lite blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.lite.LiteBlend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `lsst.geom.Point2I`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendData`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        # Separate factorized components from 3D cube components,
        # since they are persisted differently.
        components = []
        factorized = []
        for component in source.components:
            if isinstance(component, LiteFactorizedComponent):
                componentData = ScarletFactorizedComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    sed=component.sed,
                    morph=component.morph,
                )
                factorized.append(componentData)
            else:
                componentData = ScarletComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    model=component.get_model(),
                )
                components.append(componentData)
        sourceData = ScarletSourceData(
            components=components,
            factorizedComponents=factorized,
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        xy0=(xy0.x, xy0.y),
        extent=blend.observation.bbox.shape[1:][::-1],
        sources=sources,
        psfCenter=psfCenter,
    )

    return blendData

def scarletToData(blend, psfCenter, xy0):
    """Convert a scarlet main blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.Blend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `lsst.geom.Point2I`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendData`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        componentData = ScarletComponentData(
            xy0=tuple(int(x) for x in source.bbox.origin[1:][::-1]),
            extent=tuple(int(x) for x in source.bbox.shape[1:][::-1]),
            center=tuple(int(x) for x in source.center[::-1]),
            model=source.get_model(),
        )

        sourceData = ScarletSourceData(
            components=[componentData],
            factorizedComponents=[],
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(x) for x in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
    )

    return blendData
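# End-to-end, the persistence flow sketched below is how these helpers fit
# together; `blend`, `psfCenter`, `xy0`, `filters`, `modelPsf`, `parentId`,
# `catalog`, `band`, and `psfModel` are assumed to come from a deblender run
# and are not defined here.
#
#     blendData = scarletLiteToData(blend, psfCenter, xy0)
#     modelData = ScarletModelData(filters, modelPsf, {parentId: blendData})
#     payload = modelData.json()
#     # ... persist `payload` and load it back later ...
#     restored = ScarletModelData.parse_obj(json.loads(payload))
#     restored.updateCatalogFootprints(catalog, band, psfModel)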