Coverage for python/lsst/meas/extensions/scarlet/io.py: 27%

239 statements  

« prev     ^ index     » next       coverage.py v6.4.1, created at 2022-06-09 03:26 -0700

1from __future__ import annotations 

2 

3from dataclasses import dataclass 

4import json 

5from typing import Any 

6import numpy as np 

7from scarlet.bbox import Box, overlapped_slices 

8from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter 

9from scarlet.lite.measure import weight_sources 

10 

11from lsst.geom import Box2I, Extent2I, Point2I, Point2D 

12from lsst.afw.detection.multiband import heavyFootprintToImage 

13 

14from .source import liteModelToHeavy 

15 

16 

@dataclass
class ScarletComponentData:
    """Data for a component expressed as a 3D data cube

    For now this is used for scarlet main source models because
    their structure is too complex to persist in the same
    way that scarlet lite components can be persisted.

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `float`
        The center of the component.
    model : `numpy.ndarray`
        The model for the component as a 3D ``(band, y, x)`` array.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    model: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            # Fixed: this previously stored `self.extent` under "center",
            # so the persisted center was lost.
            "center": self.center,
            # Cast to Python floats so that `json.dumps` can serialize
            # the values (np.float32 is not JSON serializable).
            "model": tuple(self.model.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletComponentData":
        """Reconstruct `ScarletComponentData` from JSON compatible dict

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # `extent` is (width, height) while numpy arrays are (height, width)
        shape = dataShallowCopy['extent'][::-1]
        # Fixed: the number of bands is the flattened model size divided by
        # the number of pixels in a single band; the previous
        # `shape[0] * shape[1]` was the pixel count, not the band count.
        numBands = len(data['model']) // (shape[0] * shape[1])
        dataShallowCopy['model'] = np.array(data['model']).reshape((numBands,) + shape).astype(np.float32)
        return cls(**dataShallowCopy)

80 

81 

@dataclass
class ScarletFactorizedComponentData:
    """Data for a factorized component

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `float`
        The center of the component.
    sed : `numpy.ndarray`
        The SED of the component.
    morph : `numpy.ndarray`
        The 2D morphology of the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    sed: np.ndarray
    morph: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            # Fixed: this previously stored `self.extent` under "center",
            # so the persisted center was lost.
            "center": self.center,
            "sed": tuple(self.sed.astype(float)),
            "morph": tuple(self.morph.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData":
        """Reconstruct `ScarletFactorizedComponentData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletFactorizedComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # `extent` is (width, height) while numpy arrays are (height, width)
        shape = dataShallowCopy['extent'][::-1]
        dataShallowCopy["sed"] = np.array(data["sed"]).astype(np.float32)
        dataShallowCopy['morph'] = np.array(data['morph']).reshape(shape).astype(np.float32)
        return cls(**dataShallowCopy)

146 

147 

@dataclass
class ScarletSourceData:
    """Data for a scarlet source

    Attributes
    ----------
    components : `list` of `ScarletComponentData`
        The components contained in the source that are not factorized.
    factorizedComponents : `list` of `ScarletFactorizedComponentData`
        The components contained in the source that are factorized.
    peakId : `int`
        The peak ID of the source in it's parent's footprint peak catalog.
    """
    components: list[ScarletComponentData]
    factorizedComponents: list[ScarletFactorizedComponentData]
    peakId: int

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        # Factorized components are persisted under the shorter
        # "factorized" key.
        return {
            "components": [comp.asDict() for comp in self.components],
            "factorized": [comp.asDict() for comp in self.factorizedComponents],
            "peakId": self.peakId,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletSourceData":
        """Reconstruct `ScarletSourceData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletSourceData`
            The reconstructed object
        """
        parsed = dict(data)
        parsed["components"] = [
            ScarletComponentData.fromDict(comp) for comp in parsed.pop("components")
        ]
        # The persisted "factorized" key maps to the
        # `factorizedComponents` attribute.
        parsed["factorizedComponents"] = [
            ScarletFactorizedComponentData.fromDict(comp) for comp in parsed.pop("factorized")
        ]
        parsed["peakId"] = int(parsed["peakId"])
        return cls(**parsed)

217 

218 

@dataclass
class ScarletBlendData:
    """Data for an entire blend.

    Note that `xy0`, `extent`, and `psfCenter` use lsst ``(x, y)``
    convention, not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    sources : `dict` of `int`: `ScarletSourceData`
        Data for the sources contained in the blend.
    psfCenter : `tuple` of `int`
        The location used for the center of the PSF for
        the blend.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    sources: dict[int, ScarletSourceData]
    psfCenter: tuple[float, float]

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "psfCenter": self.psfCenter,
            "sources": {srcId: src.asDict() for srcId, src in self.sources.items()},
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletBlendData":
        """Reconstruct `ScarletBlendData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletBlendData`
            The reconstructed object
        """
        parsed = dict(data)
        parsed["xy0"] = tuple(parsed["xy0"])
        parsed["extent"] = tuple(parsed["extent"])
        parsed["psfCenter"] = tuple(parsed["psfCenter"])
        # JSON object keys are strings, so convert source IDs back to int.
        parsed["sources"] = {
            int(srcId): ScarletSourceData.fromDict(src)
            for srcId, src in parsed["sources"].items()
        }
        return cls(**parsed)

277 

278 

class ScarletModelData:
    """A container that propagates scarlet models for an entire `SourceCatalog`
    """
    def __init__(self, filters, psf, blends=None):
        """Initialize an instance

        Parameters
        ----------
        filters : `list` of `str`
            The names of the filters.
            The order of the filters must be the same as the order of
            the multiband model arrays, and SEDs.
        psf : `numpy.ndarray`
            The 2D array of the PSF in scarlet model space.
            This is typically a narrow Gaussian integrated over the
            pixels in the exposure.
        blends : `dict` of [`int`: `ScarletBlendData`]
            Initial `dict` that maps parent IDs from the source catalog
            to the scarlet model data for the parent blend.
        """
        self.filters = filters
        self.psf = psf
        if blends is None:
            blends = {}
        self.blends = blends

    def json(self) -> str:
        """Serialize the data model to a JSON formatted string

        Returns
        -------
        result : `str`
            The result of the object converted into a JSON format
        """
        result = {
            "filters": self.filters,
            "psfShape": self.psf.shape,
            # Fixed: cast to Python floats before serializing;
            # np.float32 elements are not JSON serializable and made
            # `json.dumps` raise a `TypeError`.
            "psf": list(self.psf.flatten().astype(float)),
            # Integer keys are coerced to strings by `json.dumps`;
            # `parse_obj` converts them back.
            "blends": {id: blend.asDict() for id, blend in self.blends.items()}
        }
        return json.dumps(result)

    @classmethod
    def parse_obj(cls, data: dict) -> "ScarletModelData":
        """Construct a ScarletModelData from python decoded JSON object.

        Parameters
        ----------
        data : `Mapping`
            The result of json.load(s) on a JSON persisted ScarletModelData

        Returns
        -------
        result : `ScarletModelData`
            The `ScarletModelData` that was loaded the from the input object
        """
        dataShallowCopy = dict(data)
        # Restore the 2D PSF from its flattened persisted form.
        modelPsf = np.array(
            dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32)
        dataShallowCopy["psf"] = modelPsf
        dataShallowCopy["blends"] = {
            int(id): ScarletBlendData.fromDict(blend)
            for id, blend in data['blends'].items()
        }
        return cls(**dataShallowCopy)

    def updateCatalogFootprints(self, catalog, band, psfModel, redistributeImage=None,
                                removeScarletData=True, updateFluxColumns=True):
        """Use the scarlet models to set HeavyFootprints for modeled sources

        Parameters
        ----------
        catalog : `lsst.afw.table.SourceCatalog`
            The catalog missing heavy footprints for deblended sources.
        band : `str`
            The name of the band that the catalog data describes.
        psfModel : `lsst.afw.detection.Psf`
            The observed PSF model for the catalog.
        redistributeImage : `lsst.afw.image.Image`
            The image that is the source for flux re-distribution.
            If `redistributeImage` is `None` then flux re-distribution is
            not performed.
        removeScarletData : `bool`
            Whether or not to remove `ScarletBlendData` for each blend
            in order to save memory.
        updateFluxColumns : `bool`
            Whether or not to update the `deblend_*` columns in the catalog.
            This should only be true when the input catalog schema already
            contains those columns.
        """
        # Iterate over the blends, since flux re-distribution must be done on
        # all of the children with the same parent
        parents = catalog[catalog["parent"] == 0]
        # Get the index of the model for the given band
        bandIndex = self.filters.index(band)

        for parentRecord in parents:
            parentId = parentRecord.getId()

            try:
                blendModel = self.blends[parentId]
            except KeyError:
                # The parent was skipped in the deblender, so there are
                # no models for its sources.
                continue
            updateBlendRecords(
                blendData=blendModel,
                catalog=catalog,
                modelPsf=self.psf,
                observedPsf=psfModel,
                redistributeImage=redistributeImage,
                bandIndex=bandIndex,
                parentFootprint=parentRecord.getFootprint(),
                updateFluxColumns=updateFluxColumns,
            )

            # Save memory by removing the data for the blend
            if removeScarletData:
                del self.blends[parentId]

398 

399 

def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, redistributeImage, bandIndex,
                       parentFootprint, updateFluxColumns):
    """Create footprints and update band-dependent columns in the catalog

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    catalog : `lsst.afw.table.SourceCatalog`
        The catalog that is being updated.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF.
    observedPsf : `lsst.afw.detection.Psf`
        The observed PSF model for the catalog.
    redistributeImage : `lsst.afw.image.Image`
        The image that is the source for flux re-distribution.
        If `redistributeImage` is `None` then flux re-distribution is
        not performed.
    bandIndex : `int`
        The number of the band to extract.
    parentFootprint : `lsst.afw.Footprint`
        The footprint of the parent, used for masking out the model
        when re-distributing flux.
    updateFluxColumns : `bool`
        Whether or not to update the `deblend_*` columns in the catalog.
        This should only be true when the input catalog schema already
        contains those columns.
    """
    # We import here to avoid a circular dependency
    from .scarletDeblendTask import setDeblenderMetrics, getFootprintMask

    useFlux = redistributeImage is not None
    xy0 = Point2I(*blendData.xy0)

    # Rebuild a single-band scarlet lite blend from the persisted data.
    blend = dataToScarlet(
        blendData=blendData,
        nBands=1,
        bandIndex=bandIndex,
        dtype=np.float32,
    )

    # Attach a data-free observation so source models can be convolved
    # with the observed PSF evaluated at the blend's PSF center.
    position = Point2D(*blendData.psfCenter)
    psfs = observedPsf.computeKernelImage(position).array[None, :, :]
    # scarlet boxes are (bands, y, x), hence the reversed extent.
    modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    blend.observation = DummyObservation(
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        bbox=modelBox,
        dtype=np.float32,
    )

    # Set the metrics for the blend.
    # TODO: remove this once DM-34558 runs all deblender metrics
    # in a separate task.
    if updateFluxColumns:
        setDeblenderMetrics(blend)

    # Update the source models if the scarlet models are used as
    # templates to re-distribute flux from an observation
    if useFlux:
        # Extract the image array to re-distribute its flux
        extent = Extent2I(*blendData.extent)
        bbox = Box2I(xy0, extent)
        blend.observation.images = redistributeImage[bbox].array[None, :, :]
        # Pixels outside the parent footprint get zero weight.
        blend.observation.weights = ~getFootprintMask(parentFootprint, None)[None, :, :]
        # Re-distribute the flux for each source in-place
        weight_sources(blend)

    # Update the HeavyFootprints for deblended sources
    # and update the band-dependent catalog columns.
    for source in blend.sources:
        sourceRecord = catalog.find(source.recordId)
        parent = catalog.find(sourceRecord["parent"])
        # Match the source back to its detected peak in the parent
        # footprint by peak ID.
        peaks = parent.getFootprint().peaks
        peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
        source.detectedPeak = peaks[peakIdx]
        # Set the Footprint
        heavy = liteModelToHeavy(
            source=source,
            blend=blend,
            xy0=xy0,
            useFlux=useFlux,
        )
        sourceRecord.setFootprint(heavy)

        if updateFluxColumns:
            # Set the flux of the scarlet model
            # TODO: this field should probably be deprecated,
            # since DM-33710 gives users access to the scarlet models.
            model = source.get_model()[0]
            sourceRecord.set("deblend_scarletFlux", np.sum(model))

            # Set the flux at the center of the model
            peak = heavy.peaks[0]
            img = heavyFootprintToImage(heavy, fill=0.0)
            sourceRecord.set("deblend_peak_instFlux", img.image[Point2I(peak["i_x"], peak["i_y"])])

            # Set the metrics columns.
            # TODO: remove this once DM-34558 runs all deblender metrics
            # in a separate task.
            sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0])
            sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0])
            sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0])
            sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0])

504 

505 

def boundedDataToBox(nBands, boundedData):
    """Convert bounds from the data storage format to a `scarlet.bbox.Box`

    Parameters
    ----------
    nBands : `int`
        The number of bands in the model.
    boundedData :
        The scarlet data object containing `xy0` and `extent`
        attributes giving bounding box information in the lsst format
        `(x, y)`.

    Returns
    -------
    bbox : `scarlet.bbox.Box`
        The scarlet bounding box generated by the bounds.
    """
    # scarlet boxes use (bands, y, x) ordering, so reverse the lsst
    # (x, y) values and prepend the band dimension.
    origin = (0, ) + boundedData.xy0[::-1]
    shape = (nBands, ) + boundedData.extent[::-1]
    return Box(shape=shape, origin=origin)

527 

528 

class ComponentCube:
    """Dummy component for scarlet main sources.

    This is duck-typed to a `scarlet.lite.LiteComponent` in order to
    generate a model from the component.

    If scarlet lite ever implements a component as a data cube,
    this class can be removed.
    """
    def __init__(self, model, center, bbox, model_bbox=None):
        """Initialization

        Parameters
        ----------
        model : `numpy.ndarray`
            The 3D (bands, y, x) model of the component.
        center : `tuple` of `int`
            The `(y, x)` center of the component.
        bbox : `scarlet.bbox.Box`
            The bounding box of the component.
        model_bbox : `scarlet.bbox.Box`, optional
            The bounding box of the entire blend.
            Accepted for API compatibility but not used; it now defaults
            to `None` because `dataToScarlet` constructs components
            without it (previously it was required yet never stored,
            which made that call raise a `TypeError`).
        """
        self.model = model
        self.center = center
        self.bbox = bbox

    def get_model(self, bbox=None):
        """Generate the model for the source

        Parameters
        ----------
        bbox : `scarlet.bbox.Box`
            The bounding box to insert the model into.
            If `bbox` is `None` then the model is returned in its own
            bounding box.

        Returns
        -------
        model : `numpy.ndarray`
            The model as a 3D `(band, y, x)` array.
        """
        model = self.model
        if bbox is not None:
            # Insert the model into a zeroed array covering `bbox`,
            # copying only the overlapping region.
            slices = overlapped_slices(bbox, self.bbox)
            _model = np.zeros(bbox.shape, model.dtype)
            _model[slices[0]] = model[slices[1]]
            model = _model
        return model

578 

579 

class DummyParameter(LiteParameter):
    """A parameter place holder

    Models in scarlet have parameters, not arrays,
    for their sed's and morphologies, so this wrapper for
    the SED and morphology arrays implements the required
    methods and attributes.
    """
    def __init__(self, x):
        # No gradient is tracked because the model is never fit.
        self.grad = None
        self.x = x

    def update(self, it, input_grad, *args):
        """No-op: the wrapped array is never updated."""
        pass

    def grow(self, new_shape, dist):
        """No-op: the wrapped array is never resized."""
        pass

    def shrink(self, dist):
        """No-op: the wrapped array is never resized."""
        pass

600 

601 

class DummyObservation(LiteObservation):
    """An observation that does not have any image data

    In order to reproduce a model in an observed seeing we make use of the
    scarlet `LiteObservation` class, but since we are not fitting the model
    to data we can use empty arrays for the image, variance, and weight data,
    and zero for the `noise_rms`.

    Parameters
    ----------
    psfs : `numpy.ndarray`
        The array of PSF images in each band
    model_psf : `numpy.ndarray`
        The image of the model PSF.
    bbox : `scarlet.bbox.Box`
        The bounding box of the observation.
    dtype : `numpy.dtype`
        The data type of the model that is generated.
    """
    def __init__(self, psfs, model_psf, bbox, dtype):
        # Zero-dimensional placeholder; the observation is never compared
        # to real data, so no actual image arrays are needed.
        dummyImage = np.zeros([], dtype=dtype)

        super().__init__(
            images=dummyImage,
            variance=dummyImage,
            weights=dummyImage,
            psfs=psfs,
            model_psf=model_psf,
            convolution_mode="real",
            noise_rms=0,
            bbox=bbox,
        )

633 

634 

def dataToScarlet(blendData, nBands=None, bandIndex=None, dtype=np.float32):
    """Convert the storage data model into a scarlet lite blend

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    nBands : `int`
        The number of bands in the image.
        If `bandIndex` is not `None` then this parameter is ignored and
        the number of bands is set to 1.
    bandIndex : `int`
        Index of model to extract. If `bandIndex` is `None` then the
        full model is extracted.
    dtype : `numpy.dtype`
        The data type of the model that is generated.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        A scarlet blend model extracted from persisted data.
    """
    if bandIndex is not None:
        nBands = 1
    # scarlet boxes are (bands, y, x), hence the reversed extent.
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    sources = []
    for sourceId, sourceData in blendData.sources.items():
        components = []
        for componentData in sourceData.components:
            bbox = boundedDataToBox(nBands, componentData)
            if bandIndex is None:
                model = componentData.model
            else:
                # Keep the band dimension so the model stays 3D.
                model = componentData.model[bandIndex][None, :, :]
            component = ComponentCube(
                model=model,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                # Fixed: `model_bbox` is a required parameter of
                # `ComponentCube.__init__`; omitting it raised a
                # `TypeError` for any non-factorized component.
                model_bbox=modelBox,
            )
            components.append(component)
        for componentData in sourceData.factorizedComponents:
            bbox = boundedDataToBox(nBands, componentData)
            # Add dummy values for properties only needed for
            # model fitting.
            if bandIndex is None:
                sed = componentData.sed
            else:
                sed = componentData.sed[bandIndex:bandIndex+1]
            sed = DummyParameter(sed)
            morph = DummyParameter(componentData.morph)
            # Note: since we aren't fitting a model, we don't need to
            # set the RMS of the background.
            # We set it to NaN just to be safe.
            component = LiteFactorizedComponent(
                sed=sed,
                morph=morph,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                model_bbox=modelBox,
                bg_rms=np.nan
            )
            components.append(component)

        source = LiteSource(components=components, dtype=dtype)
        # Record IDs used later to match sources back to catalog records.
        source.recordId = sourceId
        source.peakId = sourceData.peakId
        sources.append(source)

    # The observation is attached later (e.g. in `updateBlendRecords`).
    return LiteBlend(sources=sources, observation=None)

704 

705 

def scarletLiteToData(blend, psfCenter, xy0):
    """Convert a scarlet lite blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.lite.LiteBlend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        components = []
        for component in source.components:
            # scarlet boxes are (bands, y, x): drop the band dimension
            # and reverse to the lsst (x, y) convention.
            if isinstance(component, LiteFactorizedComponent):
                componentData = ScarletFactorizedComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    sed=component.sed,
                    morph=component.morph,
                )
            else:
                componentData = ScarletComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    model=component.get_model(),
                )
            components.append(componentData)
        sourceData = ScarletSourceData(
            components=[],
            factorizedComponents=components,
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        # Fixed for consistency with `scarletToData`: convert to plain
        # Python ints so the result is JSON serializable (numpy integer
        # types are not accepted by `json.dumps`).
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(x) for x in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
    )

    return blendData

758 

759 

def scarletToData(blend, psfCenter, xy0):
    """Convert a scarlet blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.Blend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sourceData = {}
    for src in blend.sources:
        # scarlet main sources are persisted as a single 3D data cube:
        # boxes are (bands, y, x), so drop the band dimension and
        # reverse to the lsst (x, y) convention.
        cube = ScarletComponentData(
            xy0=tuple(int(v) for v in src.bbox.origin[1:][::-1]),
            extent=tuple(int(v) for v in src.bbox.shape[1:][::-1]),
            center=tuple(int(v) for v in src.center[::-1]),
            model=src.get_model(),
        )

        sourceData[src.recordId] = ScarletSourceData(
            components=[cube],
            factorizedComponents=[],
            peakId=src.peakId,
        )

    return ScarletBlendData(
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(v) for v in blend.observation.bbox.shape[1:][::-1]),
        sources=sourceData,
        psfCenter=psfCenter,
    )