# Coverage for python/lsst/meas/extensions/scarlet/io.py: 27% of 240 statements
# (coverage.py v6.4.4, created at 2022-08-31 04:48 -0700)

from __future__ import annotations

from dataclasses import dataclass
import json
from typing import Any
import numpy as np
from scarlet.bbox import Box, overlapped_slices
from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter
from scarlet.lite.measure import weight_sources

from lsst.geom import Box2I, Extent2I, Point2I, Point2D
from lsst.afw.detection.multiband import heavyFootprintToImage

from .source import liteModelToHeavy

__all__ = [
    "ScarletComponentData",
    "ScarletFactorizedComponentData",
    "ScarletSourceData",
    "ScarletBlendData",
    "ScarletModelData",
    "updateBlendRecords",
    "boundedDataToBox",
    "ComponentCube",
    "dataToScarlet",
    "scarletLiteToData",
    "scarletToData",
]


@dataclass
class ScarletComponentData:
    """Data for a component expressed as a 3D data cube

    For now this is used for scarlet main source models because
    their structure is too complex to persist in the same
    way that scarlet lite components can be persisted.

    Note that both `xy0` and `extent` use the lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the component's bounding box.
    extent : `tuple` of `int`
        The ``(width, height)`` of the component array.
    center : `tuple` of `float`
        The center of the component.
    model : `numpy.ndarray`
        The model for the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    model: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "center": self.center,
            "model": tuple(self.model.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletComponentData":
        """Reconstruct `ScarletComponentData` from JSON compatible dict

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        shape = dataShallowCopy["extent"][::-1]
        # The model is flattened on write, so the number of bands must be
        # recovered from the total number of elements.
        numBands = len(data["model"]) // (shape[0] * shape[1])
        dataShallowCopy["model"] = np.array(data["model"]).reshape((numBands,) + shape).astype(np.float32)
        return cls(**dataShallowCopy)

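
# Example (illustrative, not part of the module): a component cube can be
# round-tripped through its JSON-compatible dict. The values below are made up
# for demonstration; ``extent`` is ``(width, height)`` while the model array
# is ``(bands, height, width)``.
# componentData = ScarletComponentData(
#     xy0=(25, 40),
#     extent=(3, 2),
#     center=(26.5, 41.0),
#     model=np.arange(12, dtype=np.float32).reshape(2, 2, 3),
# )
# encoded = json.dumps(componentData.asDict())
# restored = ScarletComponentData.fromDict(json.loads(encoded))
# assert restored.model.shape == (2, 2, 3)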

@dataclass
class ScarletFactorizedComponentData:
    """Data for a factorized component

    Note that both `xy0` and `extent` use the lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the component's bounding box.
    extent : `tuple` of `int`
        The ``(width, height)`` of the component array.
    center : `tuple` of `float`
        The ``(x, y)`` center of the component.
        Note: once this is converted into a scarlet `LiteBlend` the source has
        the traditional C++ ``(y, x)`` ordering.
    sed : `numpy.ndarray`
        The SED of the component.
    morph : `numpy.ndarray`
        The 2D morphology of the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    sed: np.ndarray
    morph: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "center": self.center,
            "sed": tuple(self.sed.astype(float)),
            "morph": tuple(self.morph.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData":
        """Reconstruct `ScarletFactorizedComponentData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletFactorizedComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        shape = dataShallowCopy["extent"][::-1]
        dataShallowCopy["sed"] = np.array(data["sed"]).astype(np.float32)
        dataShallowCopy["morph"] = np.array(data["morph"]).reshape(shape).astype(np.float32)
        return cls(**dataShallowCopy)

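
# Example (illustrative, not part of the module): the factorized flavor stores
# a 1D SED and a flattened 2D morphology instead of a full cube. Values are
# made up for demonstration.
# factorizedData = ScarletFactorizedComponentData(
#     xy0=(25, 40),
#     extent=(3, 2),
#     center=(26.5, 41.0),
#     sed=np.array([1.0, 0.5], dtype=np.float32),
#     morph=np.ones((2, 3), dtype=np.float32),
# )
# restored = ScarletFactorizedComponentData.fromDict(
#     json.loads(json.dumps(factorizedData.asDict())))
# assert restored.sed.shape == (2,) and restored.morph.shape == (2, 3)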

@dataclass
class ScarletSourceData:
    """Data for a scarlet source

    Attributes
    ----------
    components : `list` of `ScarletComponentData`
        The components contained in the source that are not factorized.
    factorizedComponents : `list` of `ScarletFactorizedComponentData`
        The components contained in the source that are factorized.
    peakId : `int`
        The peak ID of the source in its parent's footprint peak catalog.
    """
    components: list[ScarletComponentData]
    factorizedComponents: list[ScarletFactorizedComponentData]
    peakId: int

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        result = {
            "components": [],
            "factorized": [],
            "peakId": self.peakId,
        }
        for component in self.components:
            reduced = component.asDict()
            result["components"].append(reduced)

        for component in self.factorizedComponents:
            reduced = component.asDict()
            result["factorized"].append(reduced)
        return result

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletSourceData":
        """Reconstruct `ScarletSourceData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletSourceData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        del dataShallowCopy["factorized"]
        components = []
        for component in data["components"]:
            component = ScarletComponentData.fromDict(component)
            components.append(component)
        dataShallowCopy["components"] = components

        factorized = []
        for component in data["factorized"]:
            component = ScarletFactorizedComponentData.fromDict(component)
            factorized.append(component)
        dataShallowCopy["factorizedComponents"] = factorized
        dataShallowCopy["peakId"] = int(data["peakId"])
        return cls(**dataShallowCopy)


@dataclass
class ScarletBlendData:
    """Data for an entire blend.

    Note that `xy0`, `extent`, and `psfCenter` use the lsst ``(x, y)``
    convention, not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the blend's bounding box.
    extent : `tuple` of `int`
        The ``(width, height)`` of the blend array.
    sources : `dict` of `int`: `ScarletSourceData`
        Data for the sources contained in the blend.
    psfCenter : `tuple` of `float`
        The location used for the center of the PSF for
        the blend.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    sources: dict[int, ScarletSourceData]
    psfCenter: tuple[float, float]

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        result: dict[str, Any] = {"xy0": self.xy0, "extent": self.extent, "psfCenter": self.psfCenter}
        result["sources"] = {id: source.asDict() for id, source in self.sources.items()}
        return result

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletBlendData":
        """Reconstruct `ScarletBlendData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletBlendData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        dataShallowCopy["psfCenter"] = tuple(data["psfCenter"])
        dataShallowCopy["sources"] = {int(id): ScarletSourceData.fromDict(source)
                                      for id, source in data["sources"].items()}
        return cls(**dataShallowCopy)

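
# Example (illustrative, not part of the module): blend data nests the source
# data, keyed by the catalog record id. JSON turns integer keys into strings,
# so `fromDict` converts them back to `int`. `factorizedData` is the example
# object sketched above.
# sourceData = ScarletSourceData(
#     components=[], factorizedComponents=[factorizedData], peakId=17)
# blendData = ScarletBlendData(
#     xy0=(100, 200), extent=(50, 60), sources={42: sourceData}, psfCenter=(125.0, 230.0))
# restored = ScarletBlendData.fromDict(json.loads(json.dumps(blendData.asDict())))
# assert list(restored.sources) == [42]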

class ScarletModelData:
    """A container that propagates scarlet models for an entire `SourceCatalog`
    """
    def __init__(self, filters, psf, blends=None):
        """Initialize an instance

        Parameters
        ----------
        filters : `list` of `str`
            The names of the filters.
            The order of the filters must be the same as the order of
            the multiband model arrays and SEDs.
        psf : `numpy.ndarray`
            The 2D array of the PSF in scarlet model space.
            This is typically a narrow Gaussian integrated over the
            pixels in the exposure.
        blends : `dict` of [`int`: `ScarletBlendData`]
            Initial `dict` that maps parent IDs from the source catalog
            to the scarlet model data for the parent blend.
        """
        self.filters = filters
        self.psf = psf
        if blends is None:
            blends = {}
        self.blends = blends

    def json(self) -> str:
        """Serialize the data model to a JSON formatted string

        Returns
        -------
        result : `str`
            The object converted into a JSON formatted string
        """
        result = {
            "filters": self.filters,
            "psfShape": self.psf.shape,
            "psf": list(self.psf.flatten()),
            "blends": {id: blend.asDict() for id, blend in self.blends.items()}
        }
        return json.dumps(result)

    @classmethod
    def parse_obj(cls, data: dict) -> "ScarletModelData":
        """Construct a `ScarletModelData` from a python-decoded JSON object.

        Parameters
        ----------
        data : `dict`
            The result of json.load(s) on a JSON persisted ScarletModelData

        Returns
        -------
        result : `ScarletModelData`
            The `ScarletModelData` that was loaded from the input object
        """
        dataShallowCopy = dict(data)
        modelPsf = np.array(
            dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32)
        dataShallowCopy["psf"] = modelPsf
        dataShallowCopy["blends"] = {
            int(id): ScarletBlendData.fromDict(blend)
            for id, blend in data["blends"].items()
        }
        return cls(**dataShallowCopy)

    def updateCatalogFootprints(self, catalog, band, psfModel, redistributeImage=None,
                                removeScarletData=True, updateFluxColumns=True):
        """Use the scarlet models to set HeavyFootprints for modeled sources

        Parameters
        ----------
        catalog : `lsst.afw.table.SourceCatalog`
            The catalog missing heavy footprints for deblended sources.
        band : `str`
            The name of the band that the catalog data describes.
        psfModel : `lsst.afw.detection.Psf`
            The observed PSF model for the catalog.
        redistributeImage : `lsst.afw.image.Image`
            The image that is the source for flux re-distribution.
            If `redistributeImage` is `None` then flux re-distribution is
            not performed.
        removeScarletData : `bool`
            Whether or not to remove `ScarletBlendData` for each blend
            in order to save memory.
        updateFluxColumns : `bool`
            Whether or not to update the `deblend_*` columns in the catalog.
            This should only be true when the input catalog schema already
            contains those columns.
        """
        # Iterate over the blends, since flux re-distribution must be done on
        # all of the children with the same parent
        parents = catalog[catalog["parent"] == 0]
        # Get the index of the model for the given band
        bandIndex = self.filters.index(band)

        for parentRecord in parents:
            parentId = parentRecord.getId()

            try:
                blendModel = self.blends[parentId]
            except KeyError:
                # The parent was skipped in the deblender, so there are
                # no models for its sources.
                continue
            updateBlendRecords(
                blendData=blendModel,
                catalog=catalog,
                modelPsf=self.psf,
                observedPsf=psfModel,
                redistributeImage=redistributeImage,
                bandIndex=bandIndex,
                parentFootprint=parentRecord.getFootprint(),
                updateFluxColumns=updateFluxColumns,
            )

            # Save memory by removing the data for the blend
            if removeScarletData:
                del self.blends[parentId]

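
# Example (illustrative sketch): the container round-trips through its JSON
# form; a float64 PSF is used here so the standard `json` encoder accepts the
# flattened values.
# modelData = ScarletModelData(filters=["g", "r", "i"], psf=np.zeros((41, 41)))
# restored = ScarletModelData.parse_obj(json.loads(modelData.json()))
# assert restored.filters == ["g", "r", "i"] and restored.psf.shape == (41, 41)
#
# Example (illustrative sketch): rebuilding single-band HeavyFootprints from
# the persisted models; `catalog`, `psfModel`, and `calexp` are assumed to
# exist in the calling code and are not defined in this module.
# modelData.updateCatalogFootprints(
#     catalog=catalog,
#     band="r",
#     psfModel=psfModel,
#     redistributeImage=calexp.image,  # or None to skip flux re-distribution
#     removeScarletData=True,
#     updateFluxColumns=True,
# )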

def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, redistributeImage, bandIndex,
                       parentFootprint, updateFluxColumns):
    """Create footprints and update band-dependent columns in the catalog

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    catalog : `lsst.afw.table.SourceCatalog`
        The catalog that is being updated.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF.
    observedPsf : `lsst.afw.detection.Psf`
        The observed PSF model for the catalog.
    redistributeImage : `lsst.afw.image.Image`
        The image that is the source for flux re-distribution.
        If `redistributeImage` is `None` then flux re-distribution is
        not performed.
    bandIndex : `int`
        The index of the band to extract.
    parentFootprint : `lsst.afw.detection.Footprint`
        The footprint of the parent, used for masking out the model
        when re-distributing flux.
    updateFluxColumns : `bool`
        Whether or not to update the `deblend_*` columns in the catalog.
        This should only be true when the input catalog schema already
        contains those columns.
    """
    # We import here to avoid a circular dependency
    from .scarletDeblendTask import setDeblenderMetrics, getFootprintMask

    useFlux = redistributeImage is not None
    xy0 = Point2I(*blendData.xy0)

    blend = dataToScarlet(
        blendData=blendData,
        nBands=1,
        bandIndex=bandIndex,
        dtype=np.float32,
    )

    position = Point2D(*blendData.psfCenter)
    psfs = observedPsf.computeKernelImage(position).array[None, :, :]
    modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    blend.observation = DummyObservation(
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        bbox=modelBox,
        dtype=np.float32,
    )

    # Set the metrics for the blend.
    # TODO: remove this once DM-34558 runs all deblender metrics
    # in a separate task.
    if updateFluxColumns:
        setDeblenderMetrics(blend)

    # Update the source models if the scarlet models are used as
    # templates to re-distribute flux from an observation
    if useFlux:
        # Extract the image array to re-distribute its flux
        extent = Extent2I(*blendData.extent)
        bbox = Box2I(xy0, extent)
        blend.observation.images = redistributeImage[bbox].array[None, :, :]
        blend.observation.weights = ~getFootprintMask(parentFootprint, None)[None, :, :]
        # Re-distribute the flux for each source in-place
        weight_sources(blend)

    # Update the HeavyFootprints for deblended sources
    # and update the band-dependent catalog columns.
    for source in blend.sources:
        sourceRecord = catalog.find(source.recordId)
        parent = catalog.find(sourceRecord["parent"])
        peaks = parent.getFootprint().peaks
        peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
        source.detectedPeak = peaks[peakIdx]
        # Set the Footprint
        heavy = liteModelToHeavy(
            source=source,
            blend=blend,
            xy0=xy0,
            useFlux=useFlux,
        )
        sourceRecord.setFootprint(heavy)

        if updateFluxColumns:
            # Set the flux of the scarlet model
            # TODO: this field should probably be deprecated,
            # since DM-33710 gives users access to the scarlet models.
            model = source.get_model()[0]
            sourceRecord.set("deblend_scarletFlux", np.sum(model))

            # Set the flux at the center of the model
            peak = heavy.peaks[0]
            img = heavyFootprintToImage(heavy, fill=0.0)
            sourceRecord.set("deblend_peak_instFlux", img.image[Point2I(peak["i_x"], peak["i_y"])])

            # Set the metrics columns.
            # TODO: remove this once DM-34558 runs all deblender metrics
            # in a separate task.
            sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0])
            sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0])
            sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0])
            sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0])


def boundedDataToBox(nBands, boundedData):
    """Convert bounds from the data storage format to a `scarlet.bbox.Box`

    Parameters
    ----------
    nBands : `int`
        The number of bands in the model.
    boundedData :
        The scarlet data object containing `xy0` and `extent`
        attributes giving bounding box information in the lsst format
        `(x, y)`.

    Returns
    -------
    bbox : `scarlet.bbox.Box`
        The scarlet bounding box generated by the bounds.
    """
    xy0 = (0, ) + boundedData.xy0[::-1]
    extent = (nBands, ) + boundedData.extent[::-1]
    bbox = Box(shape=extent, origin=xy0)
    return bbox

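
# Example (illustrative): a component with ``xy0=(25, 40)`` and
# ``extent=(3, 2)`` in the lsst (x, y) convention becomes a 3-band scarlet box
# with origin (0, 40, 25) and shape (3, 2, 3) in the (bands, y, x) convention
# (using ``componentData`` from the example above).
# box = boundedDataToBox(3, componentData)
# assert box.origin == (0, 40, 25) and box.shape == (3, 2, 3)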

class ComponentCube:
    """Dummy component for scarlet main sources.

    This is duck-typed to a `scarlet.lite.LiteComponent` in order to
    generate a model from the component.

    If scarlet lite ever implements a component as a data cube,
    this class can be removed.
    """
    def __init__(self, model, center, bbox, model_bbox=None):
        """Initialization

        Parameters
        ----------
        model : `numpy.ndarray`
            The 3D (bands, y, x) model of the component.
        center : `tuple` of `int`
            The `(y, x)` center of the component.
        bbox : `scarlet.bbox.Box`
            The bounding box of the component.
        model_bbox : `scarlet.bbox.Box`, optional
            The bounding box of the entire blend.
            This is accepted for compatibility with other scarlet lite
            components but is not currently used.
        """
        self.model = model
        self.center = center
        self.bbox = bbox

    def get_model(self, bbox=None):
        """Generate the model for the source

        Parameters
        ----------
        bbox : `scarlet.bbox.Box`
            The bounding box to insert the model into.
            If `bbox` is `None` then the model is returned in its own
            bounding box.

        Returns
        -------
        model : `numpy.ndarray`
            The model as a 3D `(band, y, x)` array.
        """
        model = self.model
        if bbox is not None:
            slices = overlapped_slices(bbox, self.bbox)
            _model = np.zeros(bbox.shape, model.dtype)
            _model[slices[0]] = model[slices[1]]
            model = _model
        return model

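
# Example (illustrative, not part of the module): embed a single-band
# component model into a larger blend box; pixels outside the component's own
# box remain zero. The boxes follow scarlet's (bands, y, x) convention.
# cube = ComponentCube(
#     model=np.ones((1, 2, 3), dtype=np.float32),
#     center=(41, 26),
#     bbox=Box((1, 2, 3), origin=(0, 40, 25)),
# )
# full = cube.get_model(bbox=Box((1, 10, 10), origin=(0, 35, 20)))
# assert full.shape == (1, 10, 10) and full.sum() == 6.0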

class DummyParameter(LiteParameter):
    """A parameter placeholder

    Models in scarlet have parameters, not arrays,
    for their SEDs and morphologies, so this wrapper for
    the SED and morphology arrays implements the required
    methods and attributes.
    """
    def __init__(self, x):
        self.x = x
        self.grad = None

    def update(self, it, input_grad, *args):
        pass

    def grow(self, new_shape, dist):
        pass

    def shrink(self, dist):
        pass


class DummyObservation(LiteObservation):
    """An observation that does not have any image data

    In order to reproduce a model in an observed seeing we make use of the
    scarlet `LiteObservation` class, but since we are not fitting the model
    to data we can use empty arrays for the image, variance, and weight data,
    and zero for the `noise_rms`.

    Parameters
    ----------
    psfs : `numpy.ndarray`
        The array of PSF images in each band
    model_psf : `numpy.ndarray`
        The image of the model PSF.
    bbox : `scarlet.bbox.Box`
        The bounding box of the full model.
    dtype : `numpy.dtype`
        The data type of the model that is generated.
    """
    def __init__(self, psfs, model_psf, bbox, dtype):
        dummyImage = np.zeros([], dtype=dtype)

        super().__init__(
            images=dummyImage,
            variance=dummyImage,
            weights=dummyImage,
            psfs=psfs,
            model_psf=model_psf,
            convolution_mode="real",
            noise_rms=0,
            bbox=bbox,
        )


def dataToScarlet(blendData, nBands=None, bandIndex=None, dtype=np.float32):
    """Convert the storage data model into a scarlet lite blend

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    nBands : `int`
        The number of bands in the image.
        If `bandIndex` is not `None` then this parameter is ignored and
        the number of bands is set to 1.
    bandIndex : `int`
        Index of the model to extract. If `bandIndex` is `None` then the
        full model is extracted.
    dtype : `numpy.dtype`
        The data type of the model that is generated.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        A scarlet blend model extracted from persisted data.
    """
    if bandIndex is not None:
        nBands = 1
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    sources = []
    for sourceId, sourceData in blendData.sources.items():
        components = []
        for componentData in sourceData.components:
            bbox = boundedDataToBox(nBands, componentData)
            if bandIndex is None:
                model = componentData.model
            else:
                model = componentData.model[bandIndex][None, :, :]
            component = ComponentCube(
                model=model,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
            )
            components.append(component)
        for componentData in sourceData.factorizedComponents:
            bbox = boundedDataToBox(nBands, componentData)
            # Add dummy values for properties only needed for
            # model fitting.
            if bandIndex is None:
                sed = componentData.sed
            else:
                sed = componentData.sed[bandIndex:bandIndex + 1]
            sed = DummyParameter(sed)
            morph = DummyParameter(componentData.morph)
            # Note: since we aren't fitting a model, we don't need to
            # set the RMS of the background.
            # We set it to NaN just to be safe.
            component = LiteFactorizedComponent(
                sed=sed,
                morph=morph,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                model_bbox=modelBox,
                bg_rms=np.nan
            )
            components.append(component)

        source = LiteSource(components=components, dtype=dtype)
        source.recordId = sourceId
        source.peakId = sourceData.peakId
        sources.append(source)

    return LiteBlend(sources=sources, observation=None)

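
# Example (illustrative sketch): rebuild the scarlet lite blend for a single
# band from persisted data; `blendData` is assumed to come from
# `ScarletModelData.blends` and `bandIndex` from the catalog's filter order.
# blend = dataToScarlet(blendData, bandIndex=bandIndex, dtype=np.float32)
# models = [source.get_model() for source in blend.sources]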

def scarletLiteToData(blend, psfCenter, xy0):
    """Convert a scarlet lite blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.lite.LiteBlend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `lsst.geom.Point2I`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendData`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        components = []
        for component in source.components:
            if isinstance(component, LiteFactorizedComponent):
                componentData = ScarletFactorizedComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    sed=component.sed,
                    morph=component.morph,
                )
            else:
                componentData = ScarletComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    model=component.get_model(),
                )
            components.append(componentData)
        sourceData = ScarletSourceData(
            components=[],
            factorizedComponents=components,
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        xy0=(xy0.x, xy0.y),
        extent=blend.observation.bbox.shape[1:][::-1],
        sources=sources,
        psfCenter=psfCenter,
    )

    return blendData

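
# Example (illustrative sketch): persist a fitted scarlet lite blend, assuming
# `blend`, `psfCenter`, `footprintBBox`, `parentRecord`, and `modelData` exist
# in the calling code (e.g. inside the deblender task); none of them are
# defined in this module.
# blendData = scarletLiteToData(blend, psfCenter=psfCenter, xy0=footprintBBox.getMin())
# modelData.blends[parentRecord.getId()] = blendData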

def scarletToData(blend, psfCenter, xy0):
    """Convert a scarlet blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.Blend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `lsst.geom.Point2I`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendData`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        componentData = ScarletComponentData(
            xy0=tuple(int(x) for x in source.bbox.origin[1:][::-1]),
            extent=tuple(int(x) for x in source.bbox.shape[1:][::-1]),
            center=tuple(int(x) for x in source.center[::-1]),
            model=source.get_model(),
        )

        sourceData = ScarletSourceData(
            components=[componentData],
            factorizedComponents=[],
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(x) for x in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
    )

    return blendData