# Coverage-report artifact (coverage.py v6.5.0, created 2022-11-09 07:03 +0000):
# 25% of 251 statements covered in python/lsst/meas/extensions/scarlet/io.py.
1from __future__ import annotations
3from dataclasses import dataclass
4import json
5from typing import Any
6import logging
7import numpy as np
8from scarlet.bbox import Box, overlapped_slices
9from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter
10from scarlet.lite.measure import weight_sources
11import traceback
13from lsst.geom import Box2I, Extent2I, Point2I, Point2D
14from lsst.afw.detection.multiband import heavyFootprintToImage
16from .source import liteModelToHeavy
# Public API of this module: persistable data classes for scarlet models
# plus the conversion helpers between scarlet blends and those data classes.
__all__ = [
    "ScarletComponentData",
    "ScarletFactorizedComponentData",
    "ScarletSourceData",
    "ScarletBlendData",
    "ScarletModelData",
    "updateBlendRecords",
    "boundedDataToBox",
    "ComponentCube",
    "dataToScarlet",
    "scarletLiteToData",
    "scarletToData",
]

# Module-level logger used for warnings while updating catalog records.
logger = logging.getLogger(__name__)
@dataclass
class ScarletComponentData:
    """Data for a component expressed as a 3D data cube

    For now this is used for scarlet main source models because
    their structure is too complex to persist in the same
    way that scarlet lite components can be persisted.

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `float`
        The center of the component.
    model : `numpy.ndarray`
        The model for the component as a 3D `(bands, y, x)` array.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    model: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            # Bug fix: this previously stored `self.extent` under "center",
            # corrupting the component center on every round-trip.
            "center": self.center,
            "model": tuple(self.model.flatten().astype(float))
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletComponentData":
        """Reconstruct `ScarletComponentData` from JSON compatible dict

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # extent is (width, height); numpy arrays are (bands, height, width).
        shape = dataShallowCopy['extent'][::-1]
        # Bug fix: the number of bands is the flattened model size divided by
        # the number of pixels per band, not width * height.
        numBands = len(data['model']) // (shape[0] * shape[1])
        dataShallowCopy['model'] = np.array(data['model']).reshape((numBands,) + shape).astype(np.float32)
        return cls(**dataShallowCopy)
@dataclass
class ScarletFactorizedComponentData:
    """Data for a factorized component

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `int`
        The ``(x, y)`` center of the component.
        Note: once this is converted into a scarlet `LiteBlend` the source has
        the traditional c++ `(y, x)` ordering.
    sed : `numpy.ndarray`
        The SED of the component.
    morph : `numpy.ndarray`
        The 2D morphology of the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    sed: np.ndarray
    morph: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        flatMorph = self.morph.flatten().astype(float)
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "center": self.center,
            "sed": tuple(self.sed.astype(float)),
            "morph": tuple(flatMorph),
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData":
        """Reconstruct `ScarletFactorizedComponentData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletFactorizedComponentData`
            The reconstructed object
        """
        fields = dict(data)
        fields["xy0"] = tuple(data["xy0"])
        extent = tuple(data["extent"])
        fields["extent"] = extent
        # extent is (width, height) while numpy arrays are (height, width).
        morphShape = extent[::-1]
        fields["sed"] = np.asarray(data["sed"], dtype=np.float32)
        fields["morph"] = np.asarray(data["morph"], dtype=np.float32).reshape(morphShape)
        return cls(**fields)
@dataclass
class ScarletSourceData:
    """Data for a scarlet source

    Attributes
    ----------
    components : `list` of `ScarletComponentData`
        The components contained in the source that are not factorized.
    factorizedComponents : `list` of `ScarletFactorizedComponentData`
        The components contained in the source that are factorized.
    peakId : `int`
        The peak ID of the source in it's parent's footprint peak catalog.
    """
    components: list[ScarletComponentData]
    factorizedComponents: list[ScarletFactorizedComponentData]
    peakId: int

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        # Note: factorized components are persisted under the shorter
        # key "factorized".
        return {
            "components": [comp.asDict() for comp in self.components],
            "factorized": [comp.asDict() for comp in self.factorizedComponents],
            "peakId": self.peakId,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletSourceData":
        """Reconstruct `ScarletSourceData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletSourceData`
            The reconstructed object
        """
        fields = dict(data)
        # "factorized" is renamed to the constructor's
        # "factorizedComponents" keyword below.
        del fields["factorized"]
        fields["components"] = [
            ScarletComponentData.fromDict(comp) for comp in data["components"]
        ]
        fields["factorizedComponents"] = [
            ScarletFactorizedComponentData.fromDict(comp) for comp in data["factorized"]
        ]
        fields["peakId"] = int(data["peakId"])
        return cls(**fields)
@dataclass
class ScarletBlendData:
    """Data for an entire blend.

    Note that `xy0`, `extent`, and `psfCenter` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    sources : `dict` of `int`: `ScarletSourceData`
        Data for the sources contained in the blend.
    psfCenter : `tuple` of `int`
        The location used for the center of the PSF for
        the blend.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    sources: dict[int, ScarletSourceData]
    psfCenter: tuple[float, float]

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        encodedSources = {srcId: src.asDict() for srcId, src in self.sources.items()}
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "psfCenter": self.psfCenter,
            "sources": encodedSources,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletBlendData":
        """Reconstruct `ScarletBlendData` from JSON compatible
        dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletBlendData`
            The reconstructed object
        """
        fields = dict(data)
        fields["xy0"] = tuple(data["xy0"])
        fields["extent"] = tuple(data["extent"])
        fields["psfCenter"] = tuple(data["psfCenter"])
        # JSON object keys are always strings, so restore the integer IDs.
        fields["sources"] = {
            int(srcId): ScarletSourceData.fromDict(src)
            for srcId, src in data["sources"].items()
        }
        return cls(**fields)
class ScarletModelData:
    """A container that propagates scarlet models for an entire `SourceCatalog`
    """
    def __init__(self, filters, psf, blends=None):
        """Initialize an instance

        Parameters
        ----------
        filters : `list` of `str`
            The names of the filters.
            The order of the filters must be the same as the order of
            the multiband model arrays, and SEDs.
        psf : `numpy.ndarray`
            The 2D array of the PSF in scarlet model space.
            This is typically a narrow Gaussian integrated over the
            pixels in the exposure.
        blends : `dict` of [`int`: `ScarletBlendData`]
            Initial `dict` that maps parent IDs from the source catalog
            to the scarlet model data for the parent blend.
        """
        self.filters = filters
        self.psf = psf
        # Avoid a shared mutable default by creating the dict per-instance.
        if blends is None:
            blends = {}
        self.blends = blends

    def json(self) -> str:
        """Serialize the data model to a JSON formatted string

        Returns
        -------
        result : `str`
            The result of the object converted into a JSON format
        """
        result = {
            "filters": self.filters,
            "psfShape": self.psf.shape,
            # Bug fix: cast to Python floats before dumping. `json.dumps`
            # cannot encode `np.float32` scalars (only `np.float64` works,
            # and only because it subclasses `float`), and `parse_obj`
            # reconstructs the PSF as float32.
            "psf": list(self.psf.flatten().astype(float)),
            "blends": {id: blend.asDict() for id, blend in self.blends.items()}
        }
        return json.dumps(result)

    @classmethod
    def parse_obj(cls, data: dict) -> "ScarletModelData":
        """Construct a ScarletModelData from python decoded JSON object.

        Parameters
        ----------
        data : `Mapping`
            The result of json.load(s) on a JSON persisted ScarletModelData

        Returns
        -------
        result : `ScarletModelData`
            The `ScarletModelData` that was loaded the from the input object
        """
        dataShallowCopy = dict(data)
        # "psfShape" is popped so that the remaining keys match the
        # __init__ signature.
        modelPsf = np.array(
            dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32)
        dataShallowCopy["psf"] = modelPsf
        # JSON object keys are always strings, so restore the integer IDs.
        dataShallowCopy["blends"] = {
            int(id): ScarletBlendData.fromDict(blend)
            for id, blend in data['blends'].items()
        }
        return cls(**dataShallowCopy)

    def updateCatalogFootprints(self, catalog, band, psfModel, redistributeImage=None,
                                removeScarletData=True, updateFluxColumns=True):
        """Use the scarlet models to set HeavyFootprints for modeled sources

        Parameters
        ----------
        catalog : `lsst.afw.table.SourceCatalog`
            The catalog missing heavy footprints for deblended sources.
        band : `str`
            The name of the band that the catalog data describes.
        psfModel : `lsst.afw.detection.Psf`
            The observed PSF model for the catalog.
        redistributeImage : `lsst.afw.image.Image`
            The image that is the source for flux re-distribution.
            If `redistributeImage` is `None` then flux re-distribution is
            not performed.
        removeScarletData : `bool`
            Whether or not to remove `ScarletBlendData` for each blend
            in order to save memory.
        updateFluxColumns : `bool`
            Whether or not to update the `deblend_*` columns in the catalog.
            This should only be true when the input catalog schema already
            contains those columns.
        """
        # Iterate over the blends, since flux re-distribution must be done on
        # all of the children with the same parent
        parents = catalog[catalog["parent"] == 0]
        # Get the index of the model for the given band
        bandIndex = self.filters.index(band)

        for parentRecord in parents:
            parentId = parentRecord.getId()

            try:
                blendModel = self.blends[parentId]
            except KeyError:
                # The parent was skipped in the deblender, so there are
                # no models for its sources.
                continue
            updateBlendRecords(
                blendData=blendModel,
                catalog=catalog,
                modelPsf=self.psf,
                observedPsf=psfModel,
                redistributeImage=redistributeImage,
                bandIndex=bandIndex,
                parentFootprint=parentRecord.getFootprint(),
                updateFluxColumns=updateFluxColumns,
            )

            # Save memory by removing the data for the blend
            if removeScarletData:
                del self.blends[parentId]
def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, redistributeImage, bandIndex,
                       parentFootprint, updateFluxColumns):
    """Create footprints and update band-dependent columns in the catalog

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    catalog : `lsst.afw.table.SourceCatalog`
        The catalog that is being updated.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF.
    observedPsf : `lsst.afw.detection.Psf`
        The observed PSF model for the catalog.
    redistributeImage : `lsst.afw.image.Image`
        The image that is the source for flux re-distribution.
        If `redistributeImage` is `None` then flux re-distribution is
        not performed.
    bandIndex : `int`
        The number of the band to extract.
    parentFootprint : `lsst.afw.Footprint`
        The footprint of the parent, used for masking out the model
        when re-distributing flux.
    updateFluxColumns : `bool`
        Whether or not to update the `deblend_*` columns in the catalog.
        This should only be true when the input catalog schema already
        contains those columns.
    """
    # We import here to avoid a circular dependency
    from .scarletDeblendTask import setDeblenderMetrics, getFootprintMask

    useFlux = redistributeImage is not None
    xy0 = Point2I(*blendData.xy0)
    # Build a single-band scarlet lite blend from the persisted data.
    blend = dataToScarlet(
        blendData=blendData,
        nBands=1,
        bandIndex=bandIndex,
        dtype=np.float32,
    )
    # Attach an observation so the model can be rendered in the
    # observed seeing; no image data is needed for that.
    position = Point2D(*blendData.psfCenter)
    psfs = observedPsf.computeKernelImage(position).array[None, :, :]
    modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    blend.observation = DummyObservation(
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        bbox=modelBox,
        dtype=np.float32,
    )

    # Set the metrics for the blend.
    # TODO: remove this once DM-34558 runs all deblender metrics
    # in a separate task.
    if updateFluxColumns:
        setDeblenderMetrics(blend)

    # Update the source models if the scarlet models are used as
    # templates to re-distribute flux from an observation
    if useFlux:
        # Extract the image array to re-distribute its flux
        extent = Extent2I(*blendData.extent)
        bbox = Box2I(xy0, extent)
        blend.observation.images = redistributeImage[bbox].array[None, :, :]
        blend.observation.weights = ~getFootprintMask(parentFootprint, None)[None, :, :]
        # Re-distribute the flux for each source in-place
        weight_sources(blend)

    # Update the HeavyFootprints for deblended sources
    # and update the band-dependent catalog columns.
    for source in blend.sources:
        sourceRecord = catalog.find(source.recordId)
        parent = catalog.find(sourceRecord["parent"])
        peaks = parent.getFootprint().peaks
        peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
        source.detectedPeak = peaks[peakIdx]
        # Set the Footprint
        heavy = liteModelToHeavy(
            source=source,
            blend=blend,
            xy0=xy0,
            useFlux=useFlux,
        )
        sourceRecord.setFootprint(heavy)

        if updateFluxColumns:
            # Set the flux of the scarlet model
            # TODO: this field should probably be deprecated,
            # since DM-33710 gives users access to the scarlet models.
            model = source.get_model()[0]
            sourceRecord.set("deblend_scarletFlux", np.sum(model))

            # Set the flux at the center of the model
            peak = heavy.peaks[0]
            img = heavyFootprintToImage(heavy, fill=0.0)
            try:
                sourceRecord.set("deblend_peak_instFlux", img.image[Point2I(peak["i_x"], peak["i_y"])])
            except Exception:
                srcId = sourceRecord.getId()
                x = peak["i_x"]
                y = peak["i_y"]
                # Bug fix: `Logger.warn` is deprecated; use `warning` with
                # lazy %-formatting so the message is only built if emitted.
                logger.warning(
                    "Source %s at %s,%s could not set the peak flux with error:",
                    srcId, x, y,
                )
                traceback.print_exc()
                sourceRecord.set("deblend_peak_instFlux", np.nan)

            # Set the metrics columns.
            # TODO: remove this once DM-34558 runs all deblender metrics
            # in a separate task.
            sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0])
            sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0])
            sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0])
            sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0])
def boundedDataToBox(nBands, boundedData):
    """Convert bounds from the data storage format to a `scarlet.bbox.Box`

    Parameters
    ----------
    nBands : `int`
        The number of bands in the model.
    boundedData :
        The scarlet data object containing `xy0` and `extent`
        attributes giving bounding box information in the lsst format
        `(x, y)`.

    Returns
    -------
    bbox : `scarlet.bbox.Box`
        The scarlet bounding box generated by the bounds.
    """
    # Scarlet boxes are (bands, y, x); the persisted bounds are (x, y),
    # so reverse them and prepend the band dimension.
    origin = (0, ) + tuple(boundedData.xy0[::-1])
    shape = (nBands, ) + tuple(boundedData.extent[::-1])
    return Box(shape=shape, origin=origin)
class ComponentCube:
    """Dummy component for scarlet main sources.

    This is duck-typed to a `scarlet.lite.LiteComponent` in order to
    generate a model from the component.

    If scarlet lite ever implements a component as a data cube,
    this class can be removed.
    """
    def __init__(self, model, center, bbox, model_bbox=None):
        """Initialization

        Parameters
        ----------
        model : `numpy.ndarray`
            The 3D (bands, y, x) model of the component.
        center : `tuple` of `int`
            The `(y, x)` center of the component.
        bbox : `scarlet.bbox.Box`
            The bounding box of the component.
        model_bbox : `scarlet.bbox.Box`, optional
            The bounding box of the entire blend.
            Kept for interface compatibility but unused; it now defaults
            to `None` because callers (e.g. `dataToScarlet`) do not pass
            it and previously raised a `TypeError`.
        """
        self.model = model
        self.center = center
        self.bbox = bbox

    def get_model(self, bbox=None):
        """Generate the model for the source

        Parameters
        ----------
        bbox : `scarlet.bbox.Box`
            The bounding box to insert the model into.
            If `bbox` is `None` then the model is returned in its own
            bounding box.

        Returns
        -------
        model : `numpy.ndarray`
            The model as a 3D `(band, y, x)` array.
        """
        model = self.model
        if bbox is not None:
            # Project the component model into the overlapping region
            # of the requested box.
            slices = overlapped_slices(bbox, self.bbox)
            _model = np.zeros(bbox.shape, model.dtype)
            _model[slices[0]] = model[slices[1]]
            model = _model
        return model
class DummyParameter(LiteParameter):
    """A parameter place holder

    Models in scarlet have parameters, not arrays,
    for their sed's and morphologies, so this wrapper for
    the SED and morphology arrays implements the required
    methods and attributes.
    """
    def __init__(self, x):
        # The raw array being wrapped (an SED or morphology).
        self.x = x
        # No gradient is tracked because persisted models are never fit.
        self.grad = None

    def update(self, it, input_grad, *args):
        """No-op: persisted models are never updated during fitting."""
        pass

    def grow(self, new_shape, dist):
        """No-op: persisted models are never resized."""
        pass

    def shrink(self, dist):
        """No-op: persisted models are never resized."""
        pass
class DummyObservation(LiteObservation):
    """An observation that does not have any image data

    In order to reproduce a model in an observed seeing we make use of the
    scarlet `LiteObservation` class, but since we are not fitting the model
    to data we can use empty arrays for the image, variance, and weight data,
    and zero for the `noise_rms`.

    Parameters
    ----------
    psfs : `numpy.ndarray`
        The array of PSF images in each band
    model_psf : `numpy.ndarray`
        The image of the model PSF.
    bbox : `scarlet.bbox.Box`
        The bounding box of the full model.
    dtype : `numpy.dtype`
        The data type of the model that is generated.
    """
    def __init__(self, psfs, model_psf, bbox, dtype):
        # Zero-size placeholder array; the base class only requires that
        # the image, variance, and weights attributes exist.
        dummyImage = np.zeros([], dtype=dtype)

        super().__init__(
            images=dummyImage,
            variance=dummyImage,
            weights=dummyImage,
            psfs=psfs,
            model_psf=model_psf,
            convolution_mode="real",
            noise_rms=0,
            bbox=bbox,
        )
def dataToScarlet(blendData, nBands=None, bandIndex=None, dtype=np.float32):
    """Convert the storage data model into a scarlet lite blend

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    nBands : `int`
        The number of bands in the image.
        If `bandIndex` is not `None` then this parameter is ignored and
        the number of bands is set to 1.
    bandIndex : `int`
        Index of model to extract. If `bandIndex` is `None` then the
        full model is extracted.
    dtype : `numpy.dtype`
        The data type of the model that is generated.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        A scarlet blend model extracted from persisted data.
    """
    if bandIndex is not None:
        # Only a single band is extracted.
        nBands = 1
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    sources = []
    for sourceId, sourceData in blendData.sources.items():
        components = []
        for componentData in sourceData.components:
            bbox = boundedDataToBox(nBands, componentData)
            if bandIndex is None:
                model = componentData.model
            else:
                model = componentData.model[bandIndex][None, :, :]
            component = ComponentCube(
                model=model,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                # Bug fix: `ComponentCube.__init__` requires `model_bbox`;
                # it was previously omitted, raising a `TypeError` for any
                # source with data-cube components.
                model_bbox=modelBox,
            )
            components.append(component)
        for componentData in sourceData.factorizedComponents:
            bbox = boundedDataToBox(nBands, componentData)
            # Add dummy values for properties only needed for
            # model fitting.
            if bandIndex is None:
                sed = componentData.sed
            else:
                sed = componentData.sed[bandIndex:bandIndex+1]
            sed = DummyParameter(sed)
            morph = DummyParameter(componentData.morph)
            # Note: since we aren't fitting a model, we don't need to
            # set the RMS of the background.
            # We set it to NaN just to be safe.
            component = LiteFactorizedComponent(
                sed=sed,
                morph=morph,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                model_bbox=modelBox,
                bg_rms=np.nan
            )
            components.append(component)

        source = LiteSource(components=components, dtype=dtype)
        # Attach catalog bookkeeping so records can be matched later.
        source.recordId = sourceId
        source.peakId = sourceData.peakId
        sources.append(source)

    return LiteBlend(sources=sources, observation=None)
def scarletLiteToData(blend, psfCenter, xy0):
    """Convert a scarlet lite blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.lite.LiteBlend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        # Bug fix: keep factorized and non-factorized components in separate
        # lists. Previously both kinds were appended to one list that was
        # stored entirely under `factorizedComponents`, so any data-cube
        # component was persisted with the wrong type.
        factorized = []
        cubes = []
        for component in source.components:
            if isinstance(component, LiteFactorizedComponent):
                factorized.append(ScarletFactorizedComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    sed=component.sed,
                    morph=component.morph,
                ))
            else:
                cubes.append(ScarletComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    model=component.get_model(),
                ))
        sourceData = ScarletSourceData(
            components=cubes,
            factorizedComponents=factorized,
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        xy0=(xy0.x, xy0.y),
        extent=blend.observation.bbox.shape[1:][::-1],
        sources=sources,
        psfCenter=psfCenter,
    )

    return blendData
def scarletToData(blend, psfCenter, xy0):
    """Convert a scarlet blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.Blend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        # Strip the band dimension and flip (y, x) -> (x, y) for storage.
        origin = source.bbox.origin[1:]
        shape = source.bbox.shape[1:]
        componentData = ScarletComponentData(
            xy0=tuple(int(v) for v in reversed(origin)),
            extent=tuple(int(v) for v in reversed(shape)),
            center=tuple(int(v) for v in reversed(source.center)),
            model=source.get_model(),
        )
        # Scarlet main sources are persisted as a single data cube component.
        sources[source.recordId] = ScarletSourceData(
            components=[componentData],
            factorizedComponents=[],
            peakId=source.peakId,
        )

    obsShape = blend.observation.bbox.shape[1:]
    return ScarletBlendData(
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(v) for v in reversed(obsShape)),
        sources=sources,
        psfCenter=psfCenter,
    )