Coverage for python/lsst/meas/extensions/scarlet/io.py: 24%
262 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-06-01 03:29 -0700
« prev ^ index » next coverage.py v7.2.7, created at 2023-06-01 03:29 -0700
1from __future__ import annotations
3from dataclasses import dataclass
4import json
5from typing import Any
6import logging
7import numpy as np
8from scarlet.bbox import Box, overlapped_slices
9from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter
10from scarlet.lite.measure import weight_sources
12from lsst.geom import Box2I, Extent2I, Point2I, Point2D
13from lsst.afw.image import computePsfImage
15from .source import liteModelToHeavy
# Public API of this module. `multibandDataToScarlet` was missing even though
# it is a documented public entry point; it is included here for consistency.
__all__ = [
    "ScarletComponentData",
    "ScarletFactorizedComponentData",
    "ScarletSourceData",
    "ScarletBlendData",
    "ScarletModelData",
    "updateBlendRecords",
    "boundedDataToBox",
    "ComponentCube",
    "dataToScarlet",
    "multibandDataToScarlet",
    "scarletLiteToData",
    "scarletToData",
    "DummyObservation",
]

logger = logging.getLogger(__name__)
@dataclass
class ScarletComponentData:
    """Data for a component expressed as a 3D data cube

    For now this is used for scarlet main source models because
    their structure is too complex to persist in the same
    way that scarlet lite components can be persisted.

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `int`
        The center of the component.
    model : `numpy.ndarray`
        The model for the component as a 3D `(band, y, x)` cube.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    model: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            # Bug fix: this previously serialized `self.extent` under the
            # "center" key, silently corrupting the persisted center.
            "center": self.center,
            "model": tuple(self.model.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletComponentData":
        """Reconstruct `ScarletComponentData` from JSON compatible dict

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # extent is (width, height); numpy arrays use (height, width)
        shape = dataShallowCopy['extent'][::-1]
        # Bug fix: the band count must be inferred from the flattened model
        # length; the previous `shape[0] * shape[1]` was height*width and
        # could never reshape a (bands, height, width) cube correctly.
        numBands = len(data["model"]) // (shape[0] * shape[1])
        dataShallowCopy['model'] = np.array(data['model']).reshape((numBands,) + shape).astype(np.float32)
        return cls(**dataShallowCopy)
@dataclass
class ScarletFactorizedComponentData:
    """Data for a factorized component

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `int`
        The ``(x, y)`` center of the component.
        Note: once this is converted into a scarlet `LiteBlend` the source has
        the traditional c++ `(y, x)` ordering.
    sed : `numpy.ndarray`
        The SED of the component.
    morph : `numpy.ndarray`
        The 2D morphology of the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    sed: np.ndarray
    morph: np.ndarray

    def asDict(self) -> dict:
        """Encode this component as a JSON-compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "center": self.center,
            "sed": tuple(self.sed.astype(float)),
            "morph": tuple(self.morph.flatten().astype(float)),
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData":
        """Rebuild a `ScarletFactorizedComponentData` from a JSON-compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletFactorizedComponentData`
            The reconstructed object
        """
        kwargs = dict(data)
        kwargs["xy0"] = tuple(data["xy0"])
        kwargs["extent"] = tuple(data["extent"])
        # extent is (width, height); the morphology array is (height, width)
        morphShape = kwargs["extent"][::-1]
        kwargs["sed"] = np.asarray(data["sed"], dtype=np.float32)
        kwargs["morph"] = np.asarray(data["morph"], dtype=np.float32).reshape(morphShape)
        return cls(**kwargs)
@dataclass
class ScarletSourceData:
    """Data for a scarlet source

    Attributes
    ----------
    components : `list` of `ScarletComponentData`
        The components contained in the source that are not factorized.
    factorizedComponents : `list` of `ScarletFactorizedComponentData`
        The components contained in the source that are factorized.
    peakId : `int`
        The peak ID of the source in it's parent's footprint peak catalog.
    """
    components: list[ScarletComponentData]
    factorizedComponents: list[ScarletFactorizedComponentData]
    peakId: int

    def asDict(self) -> dict:
        """Encode this source as a JSON-compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "components": [component.asDict() for component in self.components],
            "factorized": [component.asDict() for component in self.factorizedComponents],
            "peakId": self.peakId,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletSourceData":
        """Rebuild a `ScarletSourceData` from a JSON-compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletSourceData`
            The reconstructed object
        """
        kwargs = dict(data)
        kwargs["components"] = [
            ScarletComponentData.fromDict(component)
            for component in kwargs.pop("components")
        ]
        kwargs["factorizedComponents"] = [
            ScarletFactorizedComponentData.fromDict(component)
            for component in kwargs.pop("factorized")
        ]
        kwargs["peakId"] = int(data["peakId"])
        return cls(**kwargs)
@dataclass
class ScarletBlendData:
    """Data for an entire blend.

    Note that `xy0`, `extent`, and `psfCenter` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    sources : `dict` of `int`: `ScarletSourceData`
        Data for the sources contained in the blend.
    psfCenter : `tuple` of `int`
        The location used for the center of the PSF for
        the blend.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    sources: dict[int, ScarletSourceData]
    psfCenter: tuple[float, float]

    def asDict(self) -> dict:
        """Encode this blend as a JSON-compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        encoded: dict[str, Any] = {
            "xy0": self.xy0,
            "extent": self.extent,
            "psfCenter": self.psfCenter,
        }
        encoded["sources"] = {srcId: src.asDict() for srcId, src in self.sources.items()}
        return encoded

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletBlendData":
        """Rebuild a `ScarletBlendData` from a JSON-compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletBlendData`
            The reconstructed object
        """
        kwargs = dict(data)
        kwargs["xy0"] = tuple(data["xy0"])
        kwargs["extent"] = tuple(data["extent"])
        kwargs["psfCenter"] = tuple(data["psfCenter"])
        # JSON object keys are strings, so coerce the source IDs back to int
        kwargs["sources"] = {
            int(srcId): ScarletSourceData.fromDict(src)
            for srcId, src in data["sources"].items()
        }
        return cls(**kwargs)
class ScarletModelData:
    """A container that propagates scarlet models for an entire `SourceCatalog`
    """
    def __init__(self, bands, psf, blends=None):
        """Initialize an instance

        Parameters
        ----------
        bands : `list` of `str`
            The names of the bands.
            The order of the bands must be the same as the order of
            the multiband model arrays, and SEDs.
        psf : `numpy.ndarray`
            The 2D array of the PSF in scarlet model space.
            This is typically a narrow Gaussian integrated over the
            pixels in the exposure.
        blends : `dict` of [`int`: `ScarletBlendData`]
            Initial `dict` that maps parent IDs from the source catalog
            to the scarlet model data for the parent blend.
        """
        self.bands = bands
        self.psf = psf
        # Create the dict here rather than as a default argument to avoid
        # sharing one mutable dict across instances.
        if blends is None:
            blends = {}
        self.blends = blends

    def json(self) -> str:
        """Serialize the data model to a JSON formatted string

        Returns
        -------
        result : `str`
            The result of the object converted into a JSON format
        """
        result = {
            "bands": self.bands,
            "psfShape": self.psf.shape,
            # Bug fix: cast to python floats before dumping; `json.dumps`
            # cannot encode numpy float32 scalars, and the model PSF is
            # stored as float32 (see `parse_obj`).
            "psf": self.psf.flatten().astype(float).tolist(),
            "blends": {id: blend.asDict() for id, blend in self.blends.items()}
        }
        return json.dumps(result)

    @classmethod
    def parse_obj(cls, data: dict) -> "ScarletModelData":
        """Construct a ScarletModelData from python decoded JSON object.

        Parameters
        ----------
        data : `dict`
            The result of json.load(s) on a JSON persisted ScarletModelData

        Returns
        -------
        result : `ScarletModelData`
            The `ScarletModelData` that was loaded the from the input object
        """
        dataShallowCopy = dict(data)
        modelPsf = np.array(
            dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32)
        dataShallowCopy["psf"] = modelPsf
        # JSON object keys are strings, so coerce parent IDs back to int
        dataShallowCopy["blends"] = {
            int(id): ScarletBlendData.fromDict(blend)
            for id, blend in data['blends'].items()
        }
        if "filters" in dataShallowCopy:
            # Support the original version,
            # which used "filters" instead of the now canonical "bands."
            dataShallowCopy["bands"] = dataShallowCopy.pop("filters")
        return cls(**dataShallowCopy)

    def updateCatalogFootprints(self, catalog, band, psfModel, redistributeImage=None,
                                removeScarletData=True, updateFluxColumns=True):
        """Use the scarlet models to set HeavyFootprints for modeled sources

        Parameters
        ----------
        catalog : `lsst.afw.table.SourceCatalog`
            The catalog missing heavy footprints for deblended sources.
        band : `str`
            The name of the band that the catalog data describes.
        psfModel : `lsst.afw.detection.Psf`
            The observed PSF model for the catalog.
        redistributeImage : `lsst.afw.image.Image`
            The image that is the source for flux re-distribution.
            If `redistributeImage` is `None` then flux re-distribution is
            not performed.
        removeScarletData : `bool`
            Whether or not to remove `ScarletBlendData` for each blend
            in order to save memory.
        updateFluxColumns : `bool`
            Whether or not to update the `deblend_*` columns in the catalog.
            This should only be true when the input catalog schema already
            contains those columns.
        """
        # Iterate over the blends, since flux re-distribution must be done on
        # all of the children with the same parent
        parents = catalog[catalog["parent"] == 0]
        # Get the index of the model for the given band
        bandIndex = self.bands.index(band)

        for parentRecord in parents:
            parentId = parentRecord.getId()

            try:
                blendModel = self.blends[parentId]
            except KeyError:
                # The parent was skipped in the deblender, so there are
                # no models for its sources.
                continue
            updateBlendRecords(
                blendData=blendModel,
                catalog=catalog,
                modelPsf=self.psf,
                observedPsf=psfModel,
                redistributeImage=redistributeImage,
                bandIndex=bandIndex,
                parentFootprint=parentRecord.getFootprint(),
                updateFluxColumns=updateFluxColumns,
            )

            # Save memory by removing the data for the blend
            if removeScarletData:
                del self.blends[parentId]
def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, redistributeImage, bandIndex,
                       parentFootprint, updateFluxColumns):
    """Create footprints and update band-dependent columns in the catalog

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    catalog : `lsst.afw.table.SourceCatalog`
        The catalog that is being updated.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF.
    observedPsf : `lsst.afw.detection.Psf`
        The observed PSF model for the catalog.
    redistributeImage : `lsst.afw.image.Image`
        The image that is the source for flux re-distribution.
        If `redistributeImage` is `None` then flux re-distribution is
        not performed.
    bandIndex : `int`
        The number of the band to extract.
    parentFootprint : `lsst.afw.Footprint`
        The footprint of the parent, used for masking out the model
        when re-distributing flux.
    updateFluxColumns : `bool`
        Whether or not to update the `deblend_*` columns in the catalog.
        This should only be true when the input catalog schema already
        contains those columns.
    """
    # We import here to avoid a circular dependency
    from .scarletDeblendTask import setDeblenderMetrics

    useFlux = redistributeImage is not None
    xy0 = Point2I(*blendData.xy0)
    # Rebuild the blend from persisted data, extracting only the requested
    # band (nBands=1) since this catalog describes a single band.
    blend = dataToScarlet(
        blendData=blendData,
        nBands=1,
        bandIndex=bandIndex,
        dtype=np.float32,
    )
    # Evaluate the observed PSF at the location stored with the blend and
    # add a leading band axis so it matches the (band, y, x) model layout.
    position = Point2D(*blendData.psfCenter)
    psfs = observedPsf.computeKernelImage(position).array[None, :, :]
    # extent is (width, height) in lsst convention; scarlet boxes are (y, x)
    modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    # The observation carries only PSF/bbox information; no image data is
    # needed to render models in the observed seeing.
    blend.observation = DummyObservation(
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        bbox=modelBox,
        dtype=np.float32,
    )

    # Set the metrics for the blend.
    # TODO: remove this once DM-34558 runs all deblender metrics
    # in a separate task.
    if updateFluxColumns:
        setDeblenderMetrics(blend)

    # Update the source models if the scarlet models are used as
    # templates to re-distribute flux from an observation
    if useFlux:
        # Extract the image array to re-distribute its flux
        extent = Extent2I(*blendData.extent)
        bbox = Box2I(xy0, extent)
        blend.observation.images = redistributeImage[bbox].array[None, :, :]
        # Use the parent footprint spans as a mask so that flux outside the
        # footprint is not redistributed.
        blend.observation.weights = parentFootprint.spans.asArray()[None, :, :]
        # Re-distribute the flux for each source in-place
        weight_sources(blend)

    # Update the HeavyFootprints for deblended sources
    # and update the band-dependent catalog columns.
    for source in blend.sources:
        sourceRecord = catalog.find(source.recordId)
        parent = catalog.find(sourceRecord["parent"])
        peaks = parent.getFootprint().peaks
        # Match this source back to its detected peak by peak ID
        peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
        source.detectedPeak = peaks[peakIdx]
        # Set the Footprint
        heavy = liteModelToHeavy(
            source=source,
            blend=blend,
            xy0=xy0,
            useFlux=useFlux,
        )
        sourceRecord.setFootprint(heavy)

        if updateFluxColumns:
            # Set the flux of the scarlet model
            # TODO: this field should probably be deprecated,
            # since DM-33710 gives users access to the scarlet models.
            model = source.get_model()[0]
            sourceRecord.set("deblend_scarletFlux", np.sum(model))

            # Set the flux at the center of the model
            peak = heavy.peaks[0]

            img = heavy.extractImage(fill=0.0)
            try:
                sourceRecord.set("deblend_peak_instFlux", img[Point2I(peak["i_x"], peak["i_y"])])
            except Exception:
                # NOTE(review): presumably this fails when the peak lies
                # outside the footprint image bounds — confirm; the broad
                # `except` also hides other errors.
                srcId = sourceRecord.getId()
                x = peak["i_x"]
                y = peak["i_y"]
                logger.warning(
                    f"Source {srcId} at {x},{y} could not set the peak flux with error:",
                    exc_info=1
                )
                sourceRecord.set("deblend_peak_instFlux", np.nan)

            # Set the metrics columns.
            # TODO: remove this once DM-34558 runs all deblender metrics
            # in a separate task.
            sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0])
            sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0])
            sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0])
            sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0])
def boundedDataToBox(nBands, boundedData):
    """Convert bounds from the data storage format to a `scarlet.bbox.Box`

    Parameters
    ----------
    nBands : `int`
        The number of bands in the model.
    boundedData :
        The scarlet data object containing `xy0` and `extent`
        attributes giving bounding box information in the lsst format
        `(x, y)`.

    Returns
    -------
    bbox : `scarlet.bbox.Box`
        The scarlet bounding box generated by the bounds.
    """
    # Reverse the (x, y) lsst convention into scarlet's (y, x) and prepend
    # the band dimension.
    origin = (0, ) + tuple(boundedData.xy0[::-1])
    shape = (nBands, ) + tuple(boundedData.extent[::-1])
    return Box(shape=shape, origin=origin)
class ComponentCube:
    """Dummy component for scarlet main sources.

    This is duck-typed to a `scarlet.lite.LiteComponent` in order to
    generate a model from the component.

    If scarlet lite ever implements a component as a data cube,
    this class can be removed.
    """
    def __init__(self, model, center, bbox, model_bbox=None):
        """Initialization

        Parameters
        ----------
        model : `numpy.ndarray`
            The 3D (bands, y, x) model of the component.
        center : `tuple` of `int`
            The `(y, x)` center of the component.
        bbox : `scarlet.bbox.Box`
            The bounding box of the component.
        model_bbox : `scarlet.bbox.Box`, optional
            The bounding box of the entire blend.
            This parameter is unused and kept only for interface
            compatibility; it now defaults to `None` because
            `dataToScarlet` constructs `ComponentCube` without it,
            which previously raised a `TypeError`.
        """
        self.model = model
        self.center = center
        self.bbox = bbox

    def get_model(self, bbox=None):
        """Generate the model for the source

        Parameters
        ----------
        bbox : `scarlet.bbox.Box`
            The bounding box to insert the model into.
            If `bbox` is `None` then the model is returned in its own
            bounding box.

        Returns
        -------
        model : `numpy.ndarray`
            The model as a 3D `(band, y, x)` array.
        """
        model = self.model
        if bbox is not None:
            # Project the model into the requested box, zero-filling
            # pixels outside of the component's own bounding box.
            slices = overlapped_slices(bbox, self.bbox)
            _model = np.zeros(bbox.shape, model.dtype)
            _model[slices[0]] = model[slices[1]]
            model = _model
        return model
class DummyParameter(LiteParameter):
    """A parameter place holder

    Models in scarlet have parameters, not arrays,
    for their sed's and morphologies, so this wrapper for
    the SED and morphology arrays implements the required
    methods and attributes.
    """
    def __init__(self, x):
        # The wrapped array (an SED or morphology), exposed as `x` to
        # satisfy the `LiteParameter` interface.
        self.x = x
        # No gradient is tracked because this parameter is never fit.
        self.grad = None

    def update(self, it, input_grad, *args):
        """No-op: the parameter is not optimized."""
        pass

    def grow(self, new_shape, dist):
        """No-op: the parameter's shape is fixed."""
        pass

    def shrink(self, dist):
        """No-op: the parameter's shape is fixed."""
        pass
class DummyObservation(LiteObservation):
    """An observation that does not have any image data

    In order to reproduce a model in an observed seeing we make use of the
    scarlet `LiteObservation` class, but since we are not fitting the model
    to data we can use empty arrays for the image, variance, and weight data,
    and zero for the `noise_rms`.

    Parameters
    ----------
    psfs : `numpy.ndarray`
        The array of PSF images in each band
    model_psf : `numpy.ndarray`
        The image of the model PSF.
    bbox : `scarlet.bbox.Box`
        The bounding box of the full observation.
    dtype : `numpy.dtype`
        The data type of the model that is generated.
    """
    def __init__(self, psfs, model_psf, bbox, dtype):
        # A single empty array stands in for the image, variance, and
        # weight data, since none of them are used without fitting.
        empty = np.zeros([], dtype=dtype)
        super().__init__(
            images=empty,
            variance=empty,
            weights=empty,
            psfs=psfs,
            model_psf=model_psf,
            convolution_mode="real",
            noise_rms=0,
            bbox=bbox,
        )
def multibandDataToScarlet(
    modelData,
    blendId,
    observedPsfs=None,
    dtype=np.float32,
    mExposure=None,
    footprint=None,
):
    """Convert the store data model into a scarlet lite blend,
    including observation information.

    While the typical use case in the science pipelines is to attach
    scarlet models as footprints to a `SourceCatalog`, it can be advantageous
    to load an entire multi-band blend. This requires (at a minimum) the
    PSF in each observed band for the final model.

    Parameters
    ----------
    modelData : `ScarletModelData`
        The model for all of the blends in a given tract/patch.
    blendId : `int`
        The source record ID of the parent record in the catalog.
    observedPsfs : `list` of `lsst.detection.Psf`
        The PSF for each observed image.
        Typically this is obtained using
        ```butler.get("deep_Coadd_calexp.psf", **dataId)``` for a given
        (tract, patch, band).
        If `mExposure` is not `None` then the observed PSFs are generated
        automatically, otherwise this parameter is required.
    dtype : `numpy.dtype`
        Datatype for the rendered model. If `mExposure` is not `None` then
        this parameter is ignored and the `dtype` of the image is used.
    mExposure : `lsst.afw.image.MultibandExposure`
        The observed exposure in each band.
        This is not required in order to render the models into numpy arrays,
        however it is required if the user plans to perform a warm restart
        using the stored models.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent blend.
        This is only required if the user desires to perform a warm restart
        and wants to mask out the pixels outside of the parent footprint
        similar to when scarlet was executed in the science pipelines.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The full scarlet model for the blend.
    """
    # Import here to prevent circular import
    from .scarletDeblendTask import buildLiteObservation

    # Rebuild the full multi-band blend from the stored data
    blendData = modelData.blends[blendId]
    nBands = len(modelData.bands)
    blend = dataToScarlet(blendData, nBands=nBands)

    if mExposure is not None:
        # A real observation allows a warm restart of the deblender
        blend.observation = buildLiteObservation(
            modelPsf=modelData.psf,
            psfCenter=blendData.psfCenter,
            mExposure=mExposure,
            footprint=footprint,
        )
    else:
        # Render-only: build a data-free observation from the stored PSFs
        fullBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
        psfModels = computePsfImage(observedPsfs, blendData.psfCenter, modelData.bands)
        blend.observation = DummyObservation(
            psfs=psfModels,
            model_psf=modelData.psf[None, :, :],
            bbox=fullBox,
            dtype=dtype,
        )
    return blend
def dataToScarlet(blendData, nBands=None, bandIndex=None, dtype=np.float32):
    """Convert the storage data model into a scarlet lite blend

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    nBands : `int`
        The number of bands in the image.
        If `bandIndex` is `None` then this parameter is ignored and
        the number of bands is set to 1.
    bandIndex : `int`
        Index of model to extract. If `bandIndex` is `None` then the
        full model is extracted.
    dtype : `numpy.dtype`
        The data type of the model that is generated.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        A scarlet blend model extracted from persisted data.
    """
    if bandIndex is not None:
        # Extracting a single band always yields a single-band model
        nBands = 1
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))

    sources = []
    for sourceId, sourceData in blendData.sources.items():
        components = []
        # Non-factorized components are stored as full data cubes
        for cubeData in sourceData.components:
            cubeBox = boundedDataToBox(nBands, cubeData)
            if bandIndex is None:
                cubeModel = cubeData.model
            else:
                cubeModel = cubeData.model[bandIndex][None, :, :]
            components.append(
                ComponentCube(
                    model=cubeModel,
                    center=tuple(cubeData.center[::-1]),
                    bbox=cubeBox,
                )
            )
        # Factorized components are rebuilt from their SED and morphology
        for factData in sourceData.factorizedComponents:
            factBox = boundedDataToBox(nBands, factData)
            if bandIndex is None:
                sedArray = factData.sed
            else:
                sedArray = factData.sed[bandIndex:bandIndex + 1]
            # Wrap the raw arrays so they satisfy the parameter interface;
            # since no fitting takes place the background RMS is never
            # used, so NaN is a safe placeholder.
            components.append(
                LiteFactorizedComponent(
                    sed=DummyParameter(sedArray),
                    morph=DummyParameter(factData.morph),
                    center=tuple(factData.center[::-1]),
                    bbox=factBox,
                    model_bbox=modelBox,
                    bg_rms=np.nan,
                )
            )

        source = LiteSource(components=components, dtype=dtype)
        source.recordId = sourceId
        source.peakId = sourceData.peakId
        sources.append(source)

    return LiteBlend(sources=sources, observation=None)
def scarletLiteToData(blend, psfCenter, xy0):
    """Convert a scarlet lite blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.lite.LiteBlend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        # Bug fix: keep factorized and non-factorized components in
        # separate lists. Previously both kinds were appended to a single
        # list stored as `factorizedComponents`, so a plain component
        # would later be fed to `ScarletFactorizedComponentData.fromDict`
        # and fail to round-trip.
        componentData = []
        factorizedData = []
        for component in source.components:
            if isinstance(component, LiteFactorizedComponent):
                factorizedData.append(ScarletFactorizedComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    sed=component.sed,
                    morph=component.morph,
                ))
            else:
                componentData.append(ScarletComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    model=component.get_model(),
                ))
        sourceData = ScarletSourceData(
            components=componentData,
            factorizedComponents=factorizedData,
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        # Persist plain ints in (width, height)/(x, y) order, consistent
        # with `scarletToData`.
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(x) for x in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
    )

    return blendData
def scarletToData(blend, psfCenter, xy0):
    """Convert a scarlet blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.Blend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        # scarlet main sources are persisted as a single full data cube;
        # coordinates are reversed from scarlet (y, x) into lsst (x, y)
        cube = ScarletComponentData(
            xy0=tuple(int(value) for value in source.bbox.origin[1:][::-1]),
            extent=tuple(int(value) for value in source.bbox.shape[1:][::-1]),
            center=tuple(int(value) for value in source.center[::-1]),
            model=source.get_model(),
        )
        sources[source.recordId] = ScarletSourceData(
            components=[cube],
            factorizedComponents=[],
            peakId=source.peakId,
        )

    return ScarletBlendData(
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(value) for value in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
    )