Coverage for python/lsst/meas/extensions/scarlet/io.py: 27%

249 statements  

coverage.py v6.4.4, created at 2022-09-14 10:26 +0000

1from __future__ import annotations 

2 

3from dataclasses import dataclass 

4import json 

5from typing import Any 

6import logging 

7import numpy as np 

8from scarlet.bbox import Box, overlapped_slices 

9from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter 

10from scarlet.lite.measure import weight_sources 

11 

12from lsst.geom import Box2I, Extent2I, Point2I, Point2D 

13from lsst.afw.detection.multiband import heavyFootprintToImage 

14 

15from .source import liteModelToHeavy 

16 

17__all__ = [ 

18 "ScarletComponentData", 

19 "ScarletFactorizedComponentData", 

20 "ScarletSourceData", 

21 "ScarletBlendData", 

22 "ScarletModelData", 

23 "updateBlendRecords", 

24 "boundedDataToBox", 

25 "ComponentCube", 

26 "dataToScarlet", 

27 "scarletLiteToData", 

28 "scarletToData", 

29] 

30 

31logger = logging.getLogger(__name__) 

32 

33 

34@dataclass 

35class ScarletComponentData: 

36 """Data for a component expressed as a 3D data cube 

37 

38 For now this is used for scarlet main source models because 

39 their structure is too complex to persist in the same 

40 way that scarlet lite components can be persisted. 

41 

42 Note that both `xy0` and `extent` use lsst ``(x, y)`` convention, 

43 not the scarlet/C++ ``(y, x)`` convention. 

44 

45 Attributes 

46 ---------- 

47 xy0 : `tuple` of `int` 

48 The lower bound of the component's bounding box.

49 extent : `tuple` of `int`

50 The `(width, height)` of the component array.

51 center : `tuple` of `float`

52 The center of the component. 

53 model : `numpy.ndarray` 

54 The model for the component. 

55 """ 

56 xy0: tuple[int, int] 

57 extent: tuple[int, int] 

58 center: tuple[float, float] 

59 model: np.ndarray 

60 

61 def asDict(self) -> dict: 

62 """Return the object encoded into a dict for JSON serialization 

63 

64 Returns 

65 ------- 

66 result : `dict` 

67 The object encoded as a JSON compatible dict 

68 """ 

69 return { 

70 "xy0": self.xy0, 

71 "extent": self.extent, 

72 "center": self.extent, 

73 "model": tuple(self.model.flatten().astype(float)) 

74 } 

75 

76 @classmethod 

77 def fromDict(cls, data: dict) -> "ScarletComponentData": 

78 """Reconstruct `ScarletComponentData` from JSON compatible dict 

79 

80 Parameters 

81 ---------- 

82 data : `dict` 

83 Dictionary representation of the object 

84 

85 Returns 

86 ------- 

87 result : `ScarletComponentData` 

88 The reconstructed object 

89 """ 

90 dataShallowCopy = dict(data) 

91 dataShallowCopy["xy0"] = tuple(data["xy0"]) 

92 dataShallowCopy["extent"] = tuple(data["extent"]) 

93 shape = dataShallowCopy['extent'][::-1] 

94 numBands = len(data["model"]) // (shape[0] * shape[1])

95 dataShallowCopy['model'] = np.array(data['model']).reshape((numBands,) + shape).astype(np.float32) 

96 return cls(**dataShallowCopy) 
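# --- Illustrative sketch, not part of the original module ---
# A round trip through asDict/fromDict for a hypothetical 2-band, 3x4-pixel
# component; the coordinates and pixel values below are invented for
# demonstration only.
comp = ScarletComponentData(
    xy0=(25, 40),
    extent=(4, 3),  # (width, height)
    center=(26.5, 41.0),
    model=np.arange(24, dtype=np.float32).reshape(2, 3, 4),
)
encoded = json.loads(json.dumps(comp.asDict()))  # JSON-compatible payload
restored = ScarletComponentData.fromDict(encoded)
assert restored.model.shape == (2, 3, 4)  # band count is recovered from the flat model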

97 

98 

99@dataclass 

100class ScarletFactorizedComponentData: 

101 """Data for a factorized component 

102 

103 Note that both `xy0` and `extent` use lsst ``(x, y)`` convention, 

104 not the scarlet/C++ ``(y, x)`` convention. 

105 

106 Attributes 

107 ---------- 

108 xy0 : `tuple` of `int` 

109 The lower bound of the component's bounding box.

110 extent : `tuple` of `int`

111 The `(width, height)` of the component array.

112 center : `tuple` of `float`

113 The ``(x, y)`` center of the component.

114 Note: once this is converted into a scarlet `LiteBlend` the source has

115 the traditional C++ `(y, x)` ordering.

116 sed : `numpy.ndarray` 

117 The SED of the component. 

118 morph : `numpy.ndarray` 

119 The 2D morphology of the component. 

120 """ 

121 xy0: tuple[int, int] 

122 extent: tuple[int, int] 

123 center: tuple[float, float] 

124 sed: np.ndarray 

125 morph: np.ndarray 

126 

127 def asDict(self) -> dict: 

128 """Return the object encoded into a dict for JSON serialization 

129 

130 Returns 

131 ------- 

132 result : `dict` 

133 The object encoded as a JSON compatible dict 

134 """ 

135 return { 

136 "xy0": self.xy0, 

137 "extent": self.extent, 

138 "center": self.center, 

139 "sed": tuple(self.sed.astype(float)), 

140 "morph": tuple(self.morph.flatten().astype(float)) 

141 } 

142 

143 @classmethod 

144 def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData": 

145 """Reconstruct `ScarletFactorizedComponentData` from JSON compatible 

146 dict. 

147 

148 Parameters 

149 ---------- 

150 data : `dict` 

151 Dictionary representation of the object 

152 

153 Returns 

154 ------- 

155 result : `ScarletFactorizedComponentData` 

156 The reconstructed object 

157 """ 

158 dataShallowCopy = dict(data) 

159 dataShallowCopy["xy0"] = tuple(data["xy0"]) 

160 dataShallowCopy["extent"] = tuple(data["extent"]) 

161 shape = dataShallowCopy['extent'][::-1] 

162 dataShallowCopy["sed"] = np.array(data["sed"]).astype(np.float32) 

163 dataShallowCopy['morph'] = np.array(data['morph']).reshape(shape).astype(np.float32) 

164 return cls(**dataShallowCopy) 
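# --- Illustrative sketch, not part of the original module ---
# A factorized component stores a per-band SED and a 2D morphology; the full
# 3D model is (up to the details of scarlet lite's implementation) their
# outer product. Shapes and values below are invented for demonstration.
fc = ScarletFactorizedComponentData(
    xy0=(30, 12),
    extent=(5, 4),  # (width, height)
    center=(32.0, 13.5),
    sed=np.array([1.0, 2.0, 0.5], dtype=np.float32),
    morph=np.ones((4, 5), dtype=np.float32),
)
model = fc.sed[:, None, None] * fc.morph[None, :, :]  # (bands, height, width)
assert model.shape == (3, 4, 5)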

165 

166 

167@dataclass 

168class ScarletSourceData: 

169 """Data for a scarlet source 

170 

171 Attributes 

172 ---------- 

173 components : `list` of `ScarletComponentData` 

174 The components contained in the source that are not factorized. 

175 factorizedComponents : `list` of `ScarletFactorizedComponentData` 

176 The components contained in the source that are factorized. 

177 peakId : `int` 

178 The peak ID of the source in its parent's footprint peak catalog.

179 """ 

180 components: list[ScarletComponentData] 

181 factorizedComponents: list[ScarletFactorizedComponentData] 

182 peakId: int 

183 

184 def asDict(self) -> dict: 

185 """Return the object encoded into a dict for JSON serialization 

186 

187 Returns 

188 ------- 

189 result : `dict` 

190 The object encoded as a JSON compatible dict 

191 """ 

192 result = { 

193 "components": [], 

194 "factorized": [], 

195 "peakId": self.peakId, 

196 } 

197 for component in self.components: 

198 reduced = component.asDict() 

199 result["components"].append(reduced) 

200 

201 for component in self.factorizedComponents: 

202 reduced = component.asDict() 

203 result["factorized"].append(reduced) 

204 return result 

205 

206 @classmethod 

207 def fromDict(cls, data: dict) -> "ScarletSourceData": 

208 """Reconstruct `ScarletSourceData` from JSON compatible 

209 dict. 

210 

211 Parameters 

212 ---------- 

213 data : `dict` 

214 Dictionary representation of the object 

215 

216 Returns 

217 ------- 

218 result : `ScarletSourceData` 

219 The reconstructed object 

220 """ 

221 dataShallowCopy = dict(data) 

222 del dataShallowCopy["factorized"] 

223 components = [] 

224 for component in data['components']: 

225 component = ScarletComponentData.fromDict(component) 

226 components.append(component) 

227 dataShallowCopy['components'] = components 

228 

229 factorized = [] 

230 for component in data["factorized"]: 

231 component = ScarletFactorizedComponentData.fromDict(component) 

232 factorized.append(component) 

233 dataShallowCopy['factorizedComponents'] = factorized 

234 dataShallowCopy["peakId"] = int(data["peakId"]) 

235 return cls(**dataShallowCopy) 

236 

237 

238@dataclass 

239class ScarletBlendData: 

240 """Data for an entire blend. 

241 

242 Note that `xy0`, `extent`, and `psfCenter` use lsst ``(x, y)`` convention, 

243 not the scarlet/C++ ``(y, x)`` convention. 

244 

245 Attributes 

246 ---------- 

247 xy0 : `tuple` of `int` 

248 The lower bound of the blend's bounding box.

249 extent : `tuple` of `int`

250 The `(width, height)` of the blend model array.

251 sources : `dict` of `int`: `ScarletSourceData` 

252 Data for the sources contained in the blend. 

253 psfCenter : `tuple` of `int` 

254 The location used for the center of the PSF for 

255 the blend. 

256 """ 

257 xy0: tuple[int, int] 

258 extent: tuple[int, int] 

259 sources: dict[int, ScarletSourceData] 

260 psfCenter: tuple[float, float] 

261 

262 def asDict(self) -> dict: 

263 """Return the object encoded into a dict for JSON serialization 

264 

265 Returns 

266 ------- 

267 result : `dict` 

268 The object encoded as a JSON compatible dict 

269 """ 

270 result: dict[str, Any] = {"xy0": self.xy0, "extent": self.extent, "psfCenter": self.psfCenter} 

271 result['sources'] = {id: source.asDict() for id, source in self.sources.items()} 

272 return result 

273 

274 @classmethod 

275 def fromDict(cls, data: dict) -> "ScarletBlendData": 

276 """Reconstruct `ScarletBlendData` from JSON compatible 

277 dict. 

278 

279 Parameters 

280 ---------- 

281 data : `dict` 

282 Dictionary representation of the object 

283 

284 Returns 

285 ------- 

286 result : `ScarletBlendData` 

287 The reconstructed object 

288 """ 

289 dataShallowCopy = dict(data) 

290 dataShallowCopy["xy0"] = tuple(data["xy0"]) 

291 dataShallowCopy["extent"] = tuple(data["extent"]) 

292 dataShallowCopy["psfCenter"] = tuple(data["psfCenter"]) 

293 dataShallowCopy["sources"] = {int(id): ScarletSourceData.fromDict(source) 

294 for id, source in data['sources'].items()} 

295 return cls(**dataShallowCopy) 
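# --- Illustrative sketch, not part of the original module ---
# Source ids are integer catalog ids in memory, but JSON object keys are
# always strings, which is why fromDict casts them back with int(id).
# `someSourceData` is a hypothetical, already constructed ScarletSourceData.
blendData = ScarletBlendData(
    xy0=(1200, 3400),
    extent=(64, 48),
    sources={17: someSourceData},
    psfCenter=(1232, 3424),
)
restored = ScarletBlendData.fromDict(json.loads(json.dumps(blendData.asDict())))
assert 17 in restored.sources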

296 

297 

298class ScarletModelData: 

299 """A container that propagates scarlet models for an entire `SourceCatalog` 

300 """ 

301 def __init__(self, filters, psf, blends=None): 

302 """Initialize an instance 

303 

304 Parameters 

305 ---------- 

306 filters : `list` of `str` 

307 The names of the filters. 

308 The order of the filters must be the same as the order of 

309 the multiband model arrays, and SEDs. 

310 psf : `numpy.ndarray` 

311 The 2D array of the PSF in scarlet model space. 

312 This is typically a narrow Gaussian integrated over the 

313 pixels in the exposure. 

314 blends : `dict` of [`int`: `ScarletBlendData`] 

315 Initial `dict` that maps parent IDs from the source catalog 

316 to the scarlet model data for the parent blend. 

317 """ 

318 self.filters = filters 

319 self.psf = psf 

320 if blends is None: 

321 blends = {} 

322 self.blends = blends 

323 

324 def json(self) -> str: 

325 """Serialize the data model to a JSON formatted string 

326 

327 Returns 

328 ------- 

329 result : `str` 

330 The result of the object converted into a JSON format 

331 """ 

332 result = { 

333 "filters": self.filters, 

334 "psfShape": self.psf.shape, 

335 "psf": list(self.psf.flatten()), 

336 "blends": {id: blend.asDict() for id, blend in self.blends.items()} 

337 } 

338 return json.dumps(result) 

339 

340 @classmethod 

341 def parse_obj(cls, data: dict) -> "ScarletModelData": 

342 """Construct a ScarletModelData from python decoded JSON object. 

343 

344 Parameters 

345 ---------- 

346 data : `dict`

347 The result of json.load(s) on a JSON persisted ScarletModelData 

348 

349 Returns 

350 ------- 

351 result : `ScarletModelData` 

352 The `ScarletModelData` that was loaded from the input object

353 """ 

354 dataShallowCopy = dict(data) 

355 modelPsf = np.array( 

356 dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32) 

357 dataShallowCopy["psf"] = modelPsf 

358 dataShallowCopy["blends"] = { 

359 int(id): ScarletBlendData.fromDict(blend) 

360 for id, blend in data['blends'].items() 

361 } 

362 return cls(**dataShallowCopy) 
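# --- Illustrative sketch, not part of the original module ---
# The full persistence round trip for a catalog's worth of models, assuming
# `modelData` is an already populated ScarletModelData instance.
serialized = modelData.json()                          # JSON string
restored = ScarletModelData.parse_obj(json.loads(serialized))
assert restored.filters == modelData.filters
assert restored.psf.shape == modelData.psf.shape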

363 

364 def updateCatalogFootprints(self, catalog, band, psfModel, redistributeImage=None, 

365 removeScarletData=True, updateFluxColumns=True): 

366 """Use the scarlet models to set HeavyFootprints for modeled sources 

367 

368 Parameters 

369 ---------- 

370 catalog : `lsst.afw.table.SourceCatalog` 

371 The catalog missing heavy footprints for deblended sources. 

372 band : `str` 

373 The name of the band that the catalog data describes. 

374 psfModel : `lsst.afw.detection.Psf` 

375 The observed PSF model for the catalog. 

376 redistributeImage : `lsst.afw.image.Image` 

377 The image that is the source for flux re-distribution. 

378 If `redistributeImage` is `None` then flux re-distribution is 

379 not performed. 

380 removeScarletData : `bool` 

381 Whether or not to remove `ScarletBlendData` for each blend 

382 in order to save memory. 

383 updateFluxColumns : `bool` 

384 Whether or not to update the `deblend_*` columns in the catalog. 

385 This should only be true when the input catalog schema already 

386 contains those columns. 

387 """ 

388 # Iterate over the blends, since flux re-distribution must be done on 

389 # all of the children with the same parent 

390 parents = catalog[catalog["parent"] == 0] 

391 # Get the index of the model for the given band 

392 bandIndex = self.filters.index(band) 

393 

394 for parentRecord in parents: 

395 parentId = parentRecord.getId() 

396 

397 try: 

398 blendModel = self.blends[parentId] 

399 except KeyError: 

400 # The parent was skipped in the deblender, so there are 

401 # no models for its sources. 

402 continue 

403 updateBlendRecords( 

404 blendData=blendModel, 

405 catalog=catalog, 

406 modelPsf=self.psf, 

407 observedPsf=psfModel, 

408 redistributeImage=redistributeImage, 

409 bandIndex=bandIndex, 

410 parentFootprint=parentRecord.getFootprint(), 

411 updateFluxColumns=updateFluxColumns, 

412 ) 

413 

414 # Save memory by removing the data for the blend 

415 if removeScarletData: 

416 del self.blends[parentId] 
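# --- Illustrative usage sketch, not part of the original module ---
# After deblending, rehydrate per-band HeavyFootprints for a single band's
# measurement catalog. `catalog`, `exposure`, and `modelData` are assumed to
# exist; passing the exposure image enables flux re-distribution.
modelData.updateCatalogFootprints(
    catalog=catalog,
    band="i",
    psfModel=exposure.getPsf(),
    redistributeImage=exposure.image,
    removeScarletData=True,
    updateFluxColumns=True,
)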

417 

418 

419def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, redistributeImage, bandIndex, 

420 parentFootprint, updateFluxColumns): 

421 """Create footprints and update band-dependent columns in the catalog 

422 

423 Parameters 

424 ---------- 

425 blendData : `ScarletBlendData` 

426 Persistable data for the entire blend. 

427 catalog : `lsst.afw.table.SourceCatalog` 

428 The catalog that is being updated. 

429 modelPsf : `numpy.ndarray` 

430 The 2D model of the PSF. 

431 observedPsf : `lsst.afw.detection.Psf` 

432 The observed PSF model for the catalog. 

433 redistributeImage : `lsst.afw.image.Image` 

434 The image that is the source for flux re-distribution. 

435 If `redistributeImage` is `None` then flux re-distribution is 

436 not performed. 

437 bandIndex : `int` 

438 The number of the band to extract. 

439 parentFootprint : `lsst.afw.detection.Footprint`

440 The footprint of the parent, used for masking out the model 

441 when re-distributing flux. 

442 updateFluxColumns : `bool` 

443 Whether or not to update the `deblend_*` columns in the catalog. 

444 This should only be true when the input catalog schema already 

445 contains those columns. 

446 """ 

447 # We import here to avoid a circular dependency 

448 from .scarletDeblendTask import setDeblenderMetrics, getFootprintMask 

449 

450 useFlux = redistributeImage is not None 

451 xy0 = Point2I(*blendData.xy0) 

452 

453 blend = dataToScarlet( 

454 blendData=blendData, 

455 nBands=1, 

456 bandIndex=bandIndex, 

457 dtype=np.float32, 

458 ) 

459 

460 position = Point2D(*blendData.psfCenter) 

461 psfs = observedPsf.computeKernelImage(position).array[None, :, :] 

462 modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0)) 

463 blend.observation = DummyObservation( 

464 psfs=psfs, 

465 model_psf=modelPsf[None, :, :], 

466 bbox=modelBox, 

467 dtype=np.float32, 

468 ) 

469 

470 # Set the metrics for the blend. 

471 # TODO: remove this once DM-34558 runs all deblender metrics 

472 # in a separate task. 

473 if updateFluxColumns: 

474 setDeblenderMetrics(blend) 

475 

476 # Update the source models if the scarlet models are used as 

477 # templates to re-distribute flux from an observation 

478 if useFlux: 

479 # Extract the image array to re-distribute its flux 

480 extent = Extent2I(*blendData.extent) 

481 bbox = Box2I(xy0, extent) 

482 blend.observation.images = redistributeImage[bbox].array[None, :, :] 

483 blend.observation.weights = ~getFootprintMask(parentFootprint, None)[None, :, :] 

484 # Re-distribute the flux for each source in-place 

485 weight_sources(blend) 

486 

487 # Update the HeavyFootprints for deblended sources 

488 # and update the band-dependent catalog columns. 

489 for source in blend.sources: 

490 sourceRecord = catalog.find(source.recordId) 

491 parent = catalog.find(sourceRecord["parent"]) 

492 peaks = parent.getFootprint().peaks 

493 peakIdx = np.where(peaks["id"] == source.peakId)[0][0] 

494 source.detectedPeak = peaks[peakIdx] 

495 # Set the Footprint 

496 heavy = liteModelToHeavy( 

497 source=source, 

498 blend=blend, 

499 xy0=xy0, 

500 useFlux=useFlux, 

501 ) 

502 sourceRecord.setFootprint(heavy) 

503 

504 if updateFluxColumns: 

505 # Set the flux of the scarlet model 

506 # TODO: this field should probably be deprecated, 

507 # since DM-33710 gives users access to the scarlet models. 

508 model = source.get_model()[0] 

509 sourceRecord.set("deblend_scarletFlux", np.sum(model)) 

510 

511 # Set the flux at the center of the model 

512 peak = heavy.peaks[0] 

513 img = heavyFootprintToImage(heavy, fill=0.0) 

514 try: 

515 sourceRecord.set("deblend_peak_instFlux", img.image[Point2I(peak["i_x"], peak["i_y"])]) 

516 except Exception: 

517 srcId = sourceRecord.getId() 

518 x = peak["i_x"] 

519 y = peak["i_y"] 

520 logger.warning( 

521 f"Source {srcId} at {x},{y} could not set the peak flux with error:", 

522 exc_info=1 

523 ) 

524 sourceRecord.set("deblend_peak_instFlux", np.nan) 

525 

526 # Set the metrics columns. 

527 # TODO: remove this once DM-34558 runs all deblender metrics 

528 # in a separate task. 

529 sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0]) 

530 sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0]) 

531 sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0]) 

532 sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0]) 

533 

534 

535def boundedDataToBox(nBands, boundedData): 

536 """Convert bounds from the data storage format to a `scarlet.bbox.Box` 

537 

538 Parameters 

539 ---------- 

540 nBands : `int` 

541 The number of bands in the model. 

542 boundedData : 

543 The scarlet data object containing `xy0` and `extent` 

544 attributes giving bounding box information in the lsst format 

545 `(x, y)`. 

546 

547 Returns 

548 ------- 

549 bbox : `scarlet.bbox.Box` 

550 The scarlet bounding box generated by the bounds. 

551 """ 

552 xy0 = (0, ) + boundedData.xy0[::-1] 

553 extent = (nBands, ) + boundedData.extent[::-1] 

554 bbox = Box(shape=extent, origin=xy0) 

555 return bbox 
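# --- Illustrative sketch, not part of the original module ---
# boundedDataToBox flips the lsst (x, y) ordering into scarlet's
# (bands, y, x) ordering; any object exposing xy0 and extent works.
# The component below is invented for demonstration.
data = ScarletComponentData(
    xy0=(5, 10),
    extent=(20, 15),
    center=(15.0, 17.0),
    model=np.zeros((3, 15, 20), dtype=np.float32),
)
bbox = boundedDataToBox(3, data)
assert bbox.shape == (3, 15, 20)
assert bbox.origin == (0, 10, 5)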

556 

557 

558class ComponentCube: 

559 """Dummy component for scarlet main sources. 

560 

561 This is duck-typed to a `scarlet.lite.LiteComponent` in order to 

562 generate a model from the component. 

563 

564 If scarlet lite ever implements a component as a data cube, 

565 this class can be removed. 

566 """ 

567 def __init__(self, model, center, bbox, model_bbox=None):

568 """Initialization 

569 

570 Parameters 

571 ---------- 

572 model : `numpy.ndarray` 

573 The 3D (bands, y, x) model of the component. 

574 center : `tuple` of `int` 

575 The `(y, x)` center of the component. 

576 bbox : `scarlet.bbox.Box` 

577 The bounding box of the component. 

578 model_bbox : `scarlet.bbox.Box`

579 The bounding box of the entire blend. Currently unused.

580 """ 

581 self.model = model 

582 self.center = center 

583 self.bbox = bbox 

584 

585 def get_model(self, bbox=None): 

586 """Generate the model for the source 

587 

588 Parameters 

589 ---------- 

590 bbox : `scarlet.bbox.Box` 

591 The bounding box to insert the model into. 

592 If `bbox` is `None` then the model is returned in its own 

593 bounding box. 

594 

595 Returns 

596 ------- 

597 model : `numpy.ndarray` 

598 The model as a 3D `(band, y, x)` array. 

599 """ 

600 model = self.model 

601 if bbox is not None: 

602 slices = overlapped_slices(bbox, self.bbox) 

603 _model = np.zeros(bbox.shape, model.dtype) 

604 _model[slices[0]] = model[slices[1]] 

605 model = _model 

606 return model 

607 

608 

609class DummyParameter(LiteParameter): 

610 """A parameter place holder 

611 

612 Models in scarlet have parameters, not arrays, 

613 for their sed's and morphologies, so this wrapper for 

614 the SED and morphology arrays implements the required 

615 methods and attributes. 

616 """ 

617 def __init__(self, x): 

618 self.x = x 

619 self.grad = None 

620 

621 def update(self, it, input_grad, *args): 

622 pass 

623 

624 def grow(self, new_shape, dist): 

625 pass 

626 

627 def shrink(self, dist): 

628 pass 

629 

630 

631class DummyObservation(LiteObservation): 

632 """An observation that does not have any image data 

633 

634 In order to reproduce a model in an observed seeing we make use of the 

635 scarlet `LiteObservation` class, but since we are not fitting the model 

636 to data we can use empty arrays for the image, variance, and weight data, 

637 and zero for the `noise_rms`. 

638 

639 Parameters 

640 ---------- 

641 psfs : `numpy.ndarray` 

642 The array of PSF images in each band 

643 model_psf : `numpy.ndarray`

644 The image of the model PSF. 

645 bbox : `scarlet.bbox.Box` 

646 dtype : `numpy.dtype` 

647 The data type of the model that is generated. 

648 """ 

649 def __init__(self, psfs, model_psf, bbox, dtype): 

650 dummyImage = np.zeros([], dtype=dtype) 

651 

652 super().__init__( 

653 images=dummyImage, 

654 variance=dummyImage, 

655 weights=dummyImage, 

656 psfs=psfs, 

657 model_psf=model_psf, 

658 convolution_mode="real", 

659 noise_rms=0, 

660 bbox=bbox, 

661 ) 

662 

663 

664def dataToScarlet(blendData, nBands=None, bandIndex=None, dtype=np.float32): 

665 """Convert the storage data model into a scarlet lite blend 

666 

667 Parameters 

668 ---------- 

669 blendData : `ScarletBlendData` 

670 Persistable data for the entire blend. 

671 nBands : `int` 

672 The number of bands in the image. 

673 If `bandIndex` is `None` then this parameter is ignored and 

674 the number of bands is set to 1. 

675 bandIndex : `int` 

676 Index of model to extract. If `bandIndex` is `None` then the 

677 full model is extracted. 

678 dtype : `numpy.dtype` 

679 The data type of the model that is generated. 

680 

681 Returns 

682 ------- 

683 blend : `scarlet.lite.LiteBlend` 

684 A scarlet blend model extracted from persisted data. 

685 """ 

686 if bandIndex is not None: 

687 nBands = 1 

688 modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0)) 

689 sources = [] 

690 for sourceId, sourceData in blendData.sources.items(): 

691 components = [] 

692 for componentData in sourceData.components: 

693 bbox = boundedDataToBox(nBands, componentData) 

694 if bandIndex is None: 

695 model = componentData.model 

696 else: 

697 model = componentData.model[bandIndex][None, :, :] 

698 component = ComponentCube( 

699 model=model, 

700 center=tuple(componentData.center[::-1]), 

701 bbox=bbox, 

702 ) 

703 components.append(component) 

704 for componentData in sourceData.factorizedComponents: 

705 bbox = boundedDataToBox(nBands, componentData) 

706 # Add dummy values for properties only needed for 

707 # model fitting. 

708 if bandIndex is None: 

709 sed = componentData.sed 

710 else: 

711 sed = componentData.sed[bandIndex:bandIndex+1] 

712 sed = DummyParameter(sed) 

713 morph = DummyParameter(componentData.morph) 

714 # Note: since we aren't fitting a model, we don't need to 

715 # set the RMS of the background. 

716 # We set it to NaN just to be safe. 

717 component = LiteFactorizedComponent( 

718 sed=sed, 

719 morph=morph, 

720 center=tuple(componentData.center[::-1]), 

721 bbox=bbox, 

722 model_bbox=modelBox, 

723 bg_rms=np.nan 

724 ) 

725 components.append(component) 

726 

727 source = LiteSource(components=components, dtype=dtype) 

728 source.recordId = sourceId 

729 source.peakId = sourceData.peakId 

730 sources.append(source) 

731 

732 return LiteBlend(sources=sources, observation=None) 
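# --- Illustrative usage sketch, not part of the original module ---
# Rebuild a single-band scarlet lite blend from persisted data and evaluate
# each source model; `blendData` is assumed to be a ScarletBlendData taken
# from a loaded ScarletModelData.
blend = dataToScarlet(blendData, bandIndex=0, dtype=np.float32)
for source in blend.sources:
    model = source.get_model()  # (1, height, width) model in the source's bbox
    print(source.recordId, source.peakId, model.sum())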

733 

734 

735def scarletLiteToData(blend, psfCenter, xy0): 

736 """Convert a scarlet lite blend into a persistable data object 

737 

738 Parameters 

739 ---------- 

740 blend : `scarlet.lite.LiteBlend` 

741 The blend that is being persisted. 

742 psfCenter : `tuple` of `int` 

743 The center of the PSF. 

744 xy0 : `tuple` of `int` 

745 The lower coordinate of the entire blend. 

746 

747 Returns 

748 ------- 

749 blendData : `ScarletBlendDataModel` 

750 The data model for a single blend. 

751 """ 

752 sources = {} 

753 for source in blend.sources: 

754 components = [] 

755 for component in source.components: 

756 if isinstance(component, LiteFactorizedComponent): 

757 componentData = ScarletFactorizedComponentData( 

758 xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]), 

759 extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]), 

760 center=tuple(int(x) for x in component.center[::-1]), 

761 sed=component.sed, 

762 morph=component.morph, 

763 ) 

764 else: 

765 componentData = ScarletComponentData( 

766 xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]), 

767 extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]), 

768 center=tuple(int(x) for x in component.center[::-1]), 

769 model=component.get_model(), 

770 ) 

771 components.append(componentData) 

772 sourceData = ScarletSourceData( 

773 components=[], 

774 factorizedComponents=components, 

775 peakId=source.peakId, 

776 ) 

777 sources[source.recordId] = sourceData 

778 

779 blendData = ScarletBlendData( 

780 xy0=(xy0.x, xy0.y), 

781 extent=blend.observation.bbox.shape[1:][::-1], 

782 sources=sources, 

783 psfCenter=psfCenter, 

784 ) 

785 

786 return blendData 
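# --- Illustrative usage sketch, not part of the original module ---
# The persist direction: convert a fitted scarlet lite blend (whose sources
# carry recordId/peakId attributes) and store it under its parent id.
# `blend`, `modelData`, and `parentId` are assumed to exist.
blendData = scarletLiteToData(blend, psfCenter=(1024, 980), xy0=Point2I(100, 200))
modelData.blends[parentId] = blendData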

787 

788 

789def scarletToData(blend, psfCenter, xy0): 

790 """Convert a scarlet blend into a persistable data object 

791 

792 Parameters 

793 ---------- 

794 blend : `scarlet.Blend` 

795 The blend that is being persisted. 

796 psfCenter : `tuple` of `int` 

797 The center of the PSF. 

798 xy0 : `tuple` of `int` 

799 The lower coordinate of the entire blend. 

800 

801 Returns 

802 ------- 

803 blendData : `ScarletBlendDataModel` 

804 The data model for a single blend. 

805 """ 

806 sources = {} 

807 for source in blend.sources: 

808 componentData = ScarletComponentData( 

809 xy0=tuple(int(x) for x in source.bbox.origin[1:][::-1]), 

810 extent=tuple(int(x) for x in source.bbox.shape[1:][::-1]), 

811 center=tuple(int(x) for x in source.center[::-1]), 

812 model=source.get_model(), 

813 ) 

814 

815 sourceData = ScarletSourceData( 

816 components=[componentData], 

817 factorizedComponents=[], 

818 peakId=source.peakId, 

819 ) 

820 sources[source.recordId] = sourceData 

821 

822 blendData = ScarletBlendData( 

823 xy0=(int(xy0.x), int(xy0.y)), 

824 extent=tuple(int(x) for x in blend.observation.bbox.shape[1:][::-1]), 

825 sources=sources, 

826 psfCenter=psfCenter, 

827 ) 

828 

829 return blendData