Coverage for python/lsst/meas/extensions/scarlet/io.py: 24%
289 statements
« prev ^ index » next coverage.py v7.3.2, created at 2023-10-25 16:48 +0000
1from __future__ import annotations
3from dataclasses import dataclass
4import json
5import logging
6import numpy as np
7from scarlet.bbox import Box, overlapped_slices
8from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter
9from scarlet.lite.measure import weight_sources
11from lsst.geom import Box2I, Extent2I, Point2I, Point2D
12from lsst.afw.image import computePsfImage
13from lsst.afw.detection import Footprint
15from .source import liteModelToHeavy
# Public names exported by this module.
# NOTE(review): `calculateFootprintCoverage`, `multibandDataToScarlet`, and
# `DummyParameter` are defined below but not listed here — confirm whether
# that omission is intentional.
__all__ = [
    "ScarletComponentData",
    "ScarletFactorizedComponentData",
    "ScarletSourceData",
    "ScarletBlendData",
    "ScarletModelData",
    "updateBlendRecords",
    "boundedDataToBox",
    "ComponentCube",
    "dataToScarlet",
    "scarletLiteToData",
    "scarletToData",
    "DummyObservation",
]

# Module-level logger, named after the module per the standard convention.
logger = logging.getLogger(__name__)
@dataclass
class ScarletComponentData:
    """Data for a component expressed as a 3D data cube

    For now this is used for scarlet main source models because
    their structure is too complex to persist in the same
    way that scarlet lite components can be persisted.

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `float`
        The center of the component.
    model : `numpy.ndarray`
        The model for the component as a 3D `(band, y, x)` array.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    model: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            # Fixed: this previously serialized `self.extent` under the
            # "center" key, corrupting the persisted center position.
            "center": self.center,
            "model": tuple(self.model.flatten().astype(float)),
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletComponentData":
        """Reconstruct `ScarletComponentData` from JSON compatible dict

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # extent is (width, height) but numpy arrays are (height, width)
        shape = dataShallowCopy['extent'][::-1]
        # Fixed: the number of bands is the flattened model size divided by
        # the number of pixels per band; it was previously (incorrectly)
        # computed as height * width, which broke the reshape below.
        numBands = len(data["model"]) // (shape[0] * shape[1])
        dataShallowCopy['model'] = np.array(data['model']).reshape((numBands,) + shape).astype(np.float32)
        return cls(**dataShallowCopy)
@dataclass
class ScarletFactorizedComponentData:
    """Persistable data for a single factorized (SED x morphology) component.

    Both `xy0` and `extent` follow the lsst ``(x, y)`` convention rather
    than the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `int`
        The ``(x, y)`` center of the component.
        Note: once this is converted into a scarlet `LiteBlend` the source has
        the traditional c++ `(y, x)` ordering.
    sed : `numpy.ndarray`
        The SED of the component.
    morph : `numpy.ndarray`
        The 2D morphology of the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    sed: np.ndarray
    morph: np.ndarray

    def asDict(self) -> dict:
        """Encode this component as a JSON-compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        result = {"xy0": self.xy0, "extent": self.extent, "center": self.center}
        # Arrays are stored as flat tuples of plain floats so that the
        # result can be handed directly to `json.dumps`.
        result["sed"] = tuple(self.sed.astype(float))
        result["morph"] = tuple(self.morph.flatten().astype(float))
        return result

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData":
        """Rebuild a `ScarletFactorizedComponentData` from its dict form.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletFactorizedComponentData`
            The reconstructed object
        """
        kwargs = dict(data)
        kwargs["xy0"] = tuple(data["xy0"])
        extent = tuple(data["extent"])
        kwargs["extent"] = extent
        kwargs["sed"] = np.asarray(data["sed"], dtype=np.float32)
        # extent is (width, height); the 2D morphology array is (height, width).
        kwargs["morph"] = np.asarray(data["morph"], dtype=np.float32).reshape(extent[::-1])
        return cls(**kwargs)
@dataclass
class ScarletSourceData:
    """Persistable data for a single scarlet source.

    Attributes
    ----------
    components : `list` of `ScarletComponentData`
        The components contained in the source that are not factorized.
    factorizedComponents : `list` of `ScarletFactorizedComponentData`
        The components contained in the source that are factorized.
    peakId : `int`
        The peak ID of the source in it's parent's footprint peak catalog.
    """
    components: list[ScarletComponentData]
    factorizedComponents: list[ScarletFactorizedComponentData]
    peakId: int

    def asDict(self) -> dict:
        """Encode this source as a JSON-compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        # Note the asymmetric key names: factorized components are stored
        # under the shorter key "factorized".
        return {
            "components": [component.asDict() for component in self.components],
            "factorized": [component.asDict() for component in self.factorizedComponents],
            "peakId": self.peakId,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletSourceData":
        """Rebuild a `ScarletSourceData` from its dict form.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletSourceData`
            The reconstructed object
        """
        kwargs = dict(data)
        # "factorized" is renamed to the attribute name "factorizedComponents".
        del kwargs["factorized"]
        kwargs["components"] = [
            ScarletComponentData.fromDict(component) for component in data["components"]
        ]
        kwargs["factorizedComponents"] = [
            ScarletFactorizedComponentData.fromDict(component) for component in data["factorized"]
        ]
        kwargs["peakId"] = int(data["peakId"])
        return cls(**kwargs)
@dataclass
class ScarletBlendData:
    """Persistable data for an entire blend.

    `xy0`, `extent`, and `psfCenter` all follow the lsst ``(x, y)``
    convention rather than the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    sources : `dict` of `int`: `ScarletSourceData`
        Data for the sources contained in the blend.
    psfCenter : `tuple` of `int`
        The location used for the center of the PSF for
        the blend.
    bands : `list` of `str`
        The names of the bands.
        The order of the bands must be the same as the order of
        the multiband model arrays, and SEDs.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    sources: dict[int, ScarletSourceData]
    psfCenter: tuple[float, float]
    bands: tuple[str]

    def asDict(self) -> dict:
        """Encode this blend as a JSON-compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        encodedSources = {id: source.asDict() for id, source in self.sources.items()}
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "psfCenter": self.psfCenter,
            "sources": encodedSources,
            "bands": self.bands,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletBlendData":
        """Rebuild a `ScarletBlendData` from its dict form.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletBlendData`
            The reconstructed object
        """
        kwargs = dict(data)
        kwargs["xy0"] = tuple(data["xy0"])
        kwargs["extent"] = tuple(data["extent"])
        kwargs["psfCenter"] = tuple(data["psfCenter"])
        # JSON object keys are always strings; source IDs are ints.
        kwargs["sources"] = {
            int(id): ScarletSourceData.fromDict(source)
            for id, source in data["sources"].items()
        }
        kwargs["bands"] = tuple(data["bands"])
        return cls(**kwargs)
class ScarletModelData:
    """A container that propagates scarlet models for an entire `SourceCatalog`
    """
    def __init__(self, psf, blends=None):
        """Initialize an instance

        Parameters
        ----------
        psf : `numpy.ndarray`
            The 2D array of the PSF in scarlet model space.
            This is typically a narrow Gaussian integrated over the
            pixels in the exposure.
        blends : `dict` of [`int`: `ScarletBlendData`]
            Initial `dict` that maps parent IDs from the source catalog
            to the scarlet model data for the parent blend.
        """
        self.psf = psf
        # Create a fresh dict per instance rather than using a mutable
        # default argument.
        if blends is None:
            blends = {}
        self.blends = blends

    def json(self) -> str:
        """Serialize the data model to a JSON formatted string

        Returns
        -------
        result : `str`
            The result of the object converted into a JSON format
        """
        # The PSF is flattened for JSON storage; "psfShape" preserves its
        # 2D shape so `parse_obj` can restore it.
        result = {
            "psfShape": self.psf.shape,
            "psf": list(self.psf.flatten()),
            "blends": {id: blend.asDict() for id, blend in self.blends.items()}
        }
        return json.dumps(result)

    @classmethod
    def parse_obj(cls, data: dict) -> "ScarletModelData":
        """Construct a ScarletModelData from python decoded JSON object.

        Parameters
        ----------
        data : `Mapping`
            The result of json.load(s) on a JSON persisted ScarletModelData

        Returns
        -------
        result : `ScarletModelData`
            The `ScarletModelData` that was loaded the from the input object
        """
        dataShallowCopy = dict(data)
        # Restore the 2D PSF from the flattened JSON representation;
        # pop("psfShape") also removes the key so it is not passed to __init__.
        modelPsf = np.array(
            dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32)
        dataShallowCopy["psf"] = modelPsf
        # JSON object keys are strings; parent IDs are ints in the catalog.
        dataShallowCopy["blends"] = {
            int(id): ScarletBlendData.fromDict(blend)
            for id, blend in data['blends'].items()
        }
        return cls(**dataShallowCopy)

    def updateCatalogFootprints(self, catalog, band, psfModel, maskImage=None, redistributeImage=None,
                                removeScarletData=True, updateFluxColumns=True):
        """Use the scarlet models to set HeavyFootprints for modeled sources

        Parameters
        ----------
        catalog : `lsst.afw.table.SourceCatalog`
            The catalog missing heavy footprints for deblended sources.
        band : `str`
            The name of the band that the catalog data describes.
        psfModel : `lsst.afw.detection.Psf`
            The observed PSF model for the catalog.
        maskImage : `lsst.afw.image.MaskX`
            The masked image used to calculate the fraction of pixels
            in each footprint with valid data.
            This is only used when `updateFluxColumns` is `True`,
            and is required if it is.
        redistributeImage : `lsst.afw.image.Image`
            The image that is the source for flux re-distribution.
            If `redistributeImage` is `None` then flux re-distribution is
            not performed.
        removeScarletData : `bool`
            Whether or not to remove `ScarletBlendData` for each blend
            in order to save memory.
        updateFluxColumns : `bool`
            Whether or not to update the `deblend_*` columns in the catalog.
            This should only be true when the input catalog schema already
            contains those columns.
        """
        # Iterate over the blends, since flux re-distribution must be done on
        # all of the children with the same parent
        parents = catalog[catalog["parent"] == 0]

        for parentRecord in parents:
            parentId = parentRecord.getId()

            try:
                blendModel = self.blends[parentId]
            except KeyError:
                # The parent was skipped in the deblender, so there are
                # no models for its sources.
                continue

            parent = catalog.find(parentId)
            if updateFluxColumns and redistributeImage is not None:
                # Update the data coverage
                # (1 - # of NO_DATA pixels/# of pixels)
                parentRecord["deblend_dataCoverage"] = calculateFootprintCoverage(
                    parent.getFootprint(),
                    maskImage
                )

            if band not in blendModel.bands:
                parent = catalog.find(parentId)
                peaks = parent.getFootprint().peaks
                # Set the footprint and coverage of the sources in this blend
                # to zero
                if updateFluxColumns:
                    parentRecord["deblend_dataCoverage"] = 0
                for sourceId, sourceData in blendModel.sources.items():
                    sourceRecord = catalog.find(sourceId)
                    # An empty footprint holding only the detected peak
                    # replaces the (unavailable) model footprint.
                    footprint = Footprint()
                    peakIdx = np.where(peaks["id"] == sourceData.peakId)[0][0]
                    peak = peaks[peakIdx]
                    footprint.addPeak(peak.getIx(), peak.getIy(), peak.getPeakValue())
                    sourceRecord.setFootprint(footprint)
                    if updateFluxColumns:
                        sourceRecord["deblend_dataCoverage"] = 0
                continue

            # Get the index of the model for the given band
            bandIndex = blendModel.bands.index(band)

            updateBlendRecords(
                blendData=blendModel,
                catalog=catalog,
                modelPsf=self.psf,
                observedPsf=psfModel,
                maskImage=maskImage,
                redistributeImage=redistributeImage,
                bandIndex=bandIndex,
                parentFootprint=parentRecord.getFootprint(),
                updateFluxColumns=updateFluxColumns,
            )

            # Save memory by removing the data for the blend
            if removeScarletData:
                del self.blends[parentId]
def calculateFootprintCoverage(footprint, maskImage):
    """Calculate the fraction of pixels with no data in a Footprint

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint to check for missing data.
    maskImage : `lsst.afw.image.MaskX`
        The mask image with the ``NO_DATA`` bit set.

    Returns
    -------
    coverage : `float`
        The fraction of pixels in `footprint` where the ``NO_DATA`` bit is set.
    """
    # Bit value of the "NO_DATA" plane in the mask.
    noDataBit = 2**maskImage.getMaskPlaneDict()["NO_DATA"]

    # Count NO_DATA pixels that fall inside the footprint's span mask.
    bbox = footprint.getBBox()
    spanArray = footprint.spans.asArray()
    totalArea = footprint.getArea()
    maskedPixels = maskImage[bbox].array & noDataBit
    missing = (maskedPixels * spanArray) > 0
    return 1 - np.sum(missing)/totalArea
def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, maskImage, redistributeImage, bandIndex,
                       parentFootprint, updateFluxColumns):
    """Create footprints and update band-dependent columns in the catalog

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    catalog : `lsst.afw.table.SourceCatalog`
        The catalog that is being updated.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF.
    observedPsf : `lsst.afw.detection.Psf`
        The observed PSF model for the catalog.
    maskImage : `lsst.afw.image.MaskX`
        The masked image used to calculate the fraction of pixels
        in each footprint with valid data.
        This is only used when `updateFluxColumns` is `True`,
        and is required if it is.
    redistributeImage : `lsst.afw.image.Image`
        The image that is the source for flux re-distribution.
        If `redistributeImage` is `None` then flux re-distribution is
        not performed.
    bandIndex : `int`
        The number of the band to extract.
    parentFootprint : `lsst.afw.Footprint`
        The footprint of the parent, used for masking out the model
        when re-distributing flux.
    updateFluxColumns : `bool`
        Whether or not to update the `deblend_*` columns in the catalog.
        This should only be true when the input catalog schema already
        contains those columns.
    """
    # We import here to avoid a circular dependency
    from .scarletDeblendTask import setDeblenderMetrics

    useFlux = redistributeImage is not None
    xy0 = Point2I(*blendData.xy0)

    # Rebuild a single-band scarlet blend from the persisted data.
    blend = dataToScarlet(
        blendData=blendData,
        bandIndex=bandIndex,
        dtype=np.float32,
    )

    # [None, :, :] prepends a band axis so the 2D PSF image becomes a
    # (1, height, width) cube matching the single-band model.
    position = Point2D(*blendData.psfCenter)
    psfs = observedPsf.computeKernelImage(position).array[None, :, :]
    # extent is (width, height); scarlet boxes are (bands, height, width).
    modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    blend.observation = DummyObservation(
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        bbox=modelBox,
        dtype=np.float32,
    )

    # Set the metrics for the blend.
    # TODO: remove this once DM-34558 runs all deblender metrics
    # in a separate task.
    if updateFluxColumns:
        setDeblenderMetrics(blend)

    # Update the source models if the scarlet models are used as
    # templates to re-distribute flux from an observation
    if useFlux:
        # Extract the image array to re-distribute its flux
        extent = Extent2I(*blendData.extent)
        bbox = Box2I(xy0, extent)
        blend.observation.images = redistributeImage[bbox].array[None, :, :]
        # Pixels outside the parent footprint get zero weight.
        blend.observation.weights = parentFootprint.spans.asArray()[None, :, :]
        # Re-distribute the flux for each source in-place
        weight_sources(blend)

    # Update the HeavyFootprints for deblended sources
    # and update the band-dependent catalog columns.
    for source in blend.sources:
        sourceRecord = catalog.find(source.recordId)
        parent = catalog.find(sourceRecord["parent"])
        peaks = parent.getFootprint().peaks
        peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
        source.detectedPeak = peaks[peakIdx]
        # Set the Footprint
        heavy = liteModelToHeavy(
            source=source,
            blend=blend,
            xy0=xy0,
            useFlux=useFlux,
        )
        sourceRecord.setFootprint(heavy)

        if updateFluxColumns:
            # Set the fraction of pixels with valid data.
            coverage = calculateFootprintCoverage(heavy, maskImage)
            sourceRecord.set("deblend_dataCoverage", coverage)

            # Set the flux of the scarlet model
            # TODO: this field should probably be deprecated,
            # since DM-33710 gives users access to the scarlet models.
            model = source.get_model()[0]
            sourceRecord.set("deblend_scarletFlux", np.sum(model))

            # Set the flux at the center of the model
            peak = heavy.peaks[0]

            img = heavy.extractImage(fill=0.0)
            try:
                # The peak may fall outside the image, in which case
                # indexing raises and NaN is stored instead.
                sourceRecord.set("deblend_peak_instFlux", img[Point2I(peak["i_x"], peak["i_y"])])
            except Exception:
                srcId = sourceRecord.getId()
                x = peak["i_x"]
                y = peak["i_y"]
                logger.warning(
                    f"Source {srcId} at {x},{y} could not set the peak flux with error:",
                    exc_info=1
                )
                sourceRecord.set("deblend_peak_instFlux", np.nan)

            # Set the metrics columns.
            # TODO: remove this once DM-34558 runs all deblender metrics
            # in a separate task.
            sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0])
            sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0])
            sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0])
            sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0])
def boundedDataToBox(nBands, boundedData):
    """Convert bounds from the data storage format to a `scarlet.bbox.Box`

    Parameters
    ----------
    nBands : `int`
        The number of bands in the model.
    boundedData :
        The scarlet data object containing `xy0` and `extent`
        attributes giving bounding box information in the lsst format
        `(x, y)`.

    Returns
    -------
    bbox : `scarlet.bbox.Box`
        The scarlet bounding box generated by the bounds.
    """
    # Flip the lsst (x, y) values to scarlet (y, x) order and prepend the
    # band dimension (origin 0, size nBands).
    origin = (0, ) + boundedData.xy0[::-1]
    shape = (nBands, ) + boundedData.extent[::-1]
    return Box(shape=shape, origin=origin)
class ComponentCube:
    """Dummy component for scarlet main sources.

    This is duck-typed to a `scarlet.lite.LiteComponent` in order to
    generate a model from the component.

    If scarlet lite ever implements a component as a data cube,
    this class can be removed.
    """
    def __init__(self, model, center, bbox, model_bbox=None):
        """Initialization

        Parameters
        ----------
        model : `numpy.ndarray`
            The 3D (bands, y, x) model of the component.
        center : `tuple` of `int`
            The `(y, x)` center of the component.
        bbox : `scarlet.bbox.Box`
            The bounding box of the component.
        model_bbox : `scarlet.bbox.Box`, optional
            The bounding box of the entire blend.
            Fixed: this parameter was required but never stored, and
            `dataToScarlet` constructs `ComponentCube` without it (which
            raised a `TypeError`), so it now defaults to `None`.
        """
        self.model = model
        self.center = center
        self.bbox = bbox

    def get_model(self, bbox=None):
        """Generate the model for the source

        Parameters
        ----------
        bbox : `scarlet.bbox.Box`
            The bounding box to insert the model into.
            If `bbox` is `None` then the model is returned in its own
            bounding box.

        Returns
        -------
        model : `numpy.ndarray`
            The model as a 3D `(band, y, x)` array.
        """
        model = self.model
        if bbox is not None:
            # Project the component model into the requested box,
            # zero-padding pixels outside the component's own box.
            slices = overlapped_slices(bbox, self.bbox)
            _model = np.zeros(bbox.shape, model.dtype)
            _model[slices[0]] = model[slices[1]]
            model = _model
        return model
class DummyParameter(LiteParameter):
    """A parameter place holder

    Models in scarlet have parameters, not arrays,
    for their sed's and morphologies, so this wrapper for
    the SED and morphology arrays implements the required
    methods and attributes.
    """
    def __init__(self, x):
        # The wrapped array (an SED or a morphology).
        self.x = x
        # No gradient is tracked: persisted models are never re-fit.
        self.grad = None

    def update(self, it, input_grad, *args):
        # No-op: this parameter is never updated during fitting.
        pass

    def grow(self, new_shape, dist):
        # No-op: the parameter's shape is fixed.
        pass

    def shrink(self, dist):
        # No-op: the parameter's shape is fixed.
        pass
class DummyObservation(LiteObservation):
    """An observation that does not have any image data

    In order to reproduce a model in an observed seeing we make use of the
    scarlet `LiteObservation` class, but since we are not fitting the model
    to data we can use empty arrays for the image, variance, and weight data,
    and zero for the `noise_rms`.

    Parameters
    ----------
    psfs : `numpy.ndarray`
        The array of PSF images in each band
    model_psf : `numpy.ndarray`
        The image of the model PSF.
    bbox : `scarlet.bbox.Box`
        The bounding box of the observation.
    dtype : `numpy.dtype`
        The data type of the model that is generated.
    """
    def __init__(self, psfs, model_psf, bbox, dtype):
        # A 0-d placeholder array: images, variance, and weights are never
        # used because the model is not fit to data.
        placeholder = np.zeros([], dtype=dtype)

        super().__init__(
            images=placeholder,
            variance=placeholder,
            weights=placeholder,
            psfs=psfs,
            model_psf=model_psf,
            convolution_mode="real",
            noise_rms=0,
            bbox=bbox,
        )
def multibandDataToScarlet(
    modelData,
    blendId,
    observedPsfs=None,
    dtype=np.float32,
    mExposure=None,
    footprint=None,
):
    """Convert the stored data model into a scarlet lite blend,
    including observation information.

    While the typical use case in the science pipelines is to attach
    scarlet models as footprints to a `SourceCatalog`, it can be advantageous
    to load an entire multi-band blend. This requires (at a minimum) the
    PSF in each observed band for the final model.

    Parameters
    ----------
    modelData : `ScarletModelData`
        The model for all of the blends in a given tract/patch.
    blendId : `int`
        The source record ID of the parent record in the catalog.
    observedPsfs : `list` of `lsst.detection.Psf`
        The PSF for each observed image.
        Typically this is obtained using
        ```butler.get("deep_Coadd_calexp.psf", **dataId)``` for a given
        (tract, patch, band).
        If `mExposure` is not `None` then the observed PSFs are generated
        automatically, otherwise this parameter is required.
    dtype : `numpy.dtype`
        Datatype for the rendered model. If `mExposure` is not `None` then
        this parameter is ignored and the `dtype` of the image is used.
    mExposure : `lsst.afw.image.MultibandExposure`
        The observed exposure in each band.
        This is not required in order to render the models into numpy arrays,
        however it is required if the user plans to perform a warm restart
        using the stored models.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent blend.
        This is only required if the user desires to perform a warm restart
        and wants to mask out the pixels outside of the parent footprint
        similar to when scarlet was executed in the science pipelines.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The full scarlet model for the blend.
    """
    # Import here to prevent circular import
    from .scarletDeblendTask import buildLiteObservation

    # Extract the blend data
    blendData = modelData.blends[blendId]
    nBands = len(blendData.bands)
    # extent is (width, height); scarlet boxes are (bands, height, width).
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    # Fixed: `dataToScarlet` has no `nBands` parameter (it derives the band
    # count from `blendData.bands` when `bandIndex` is None); the previous
    # call `dataToScarlet(blendData, nBands=nBands)` raised a TypeError.
    # Passing `dtype` through also honors the documented parameter.
    blend = dataToScarlet(blendData, dtype=dtype)

    if mExposure is None:
        psfModels = computePsfImage(observedPsfs, blendData.psfCenter, blendData.bands)
        blend.observation = DummyObservation(
            psfs=psfModels,
            model_psf=modelData.psf[None, :, :],
            bbox=modelBox,
            dtype=dtype,
        )
    else:
        blend.observation = buildLiteObservation(
            modelPsf=modelData.psf,
            psfCenter=blendData.psfCenter,
            mExposure=mExposure,
            footprint=footprint,
        )
    return blend
def dataToScarlet(blendData, bandIndex=None, dtype=np.float32):
    """Convert the storage data model into a scarlet lite blend

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    bandIndex : `int`
        Index of model to extract. If `bandIndex` is `None` then the
        full model is extracted.
    dtype : `numpy.dtype`
        The data type of the model that is generated.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        A scarlet blend model extracted from persisted data.
    """
    if bandIndex is not None:
        nBands = 1
    else:
        nBands = len(blendData.bands)
    # extent is (width, height); scarlet boxes are (bands, height, width).
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    sources = []
    for sourceId, sourceData in blendData.sources.items():
        components = []
        for componentData in sourceData.components:
            bbox = boundedDataToBox(nBands, componentData)
            if bandIndex is None:
                model = componentData.model
            else:
                # Keep a length-1 band axis for the selected band.
                model = componentData.model[bandIndex][None, :, :]
            component = ComponentCube(
                model=model,
                # Flip the persisted (x, y) center to scarlet (y, x) order.
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                # Fixed: `model_bbox` is a required parameter of
                # `ComponentCube.__init__`; it was previously omitted,
                # which raised a TypeError for data-cube components.
                model_bbox=modelBox,
            )
            components.append(component)
        for componentData in sourceData.factorizedComponents:
            bbox = boundedDataToBox(nBands, componentData)
            # Add dummy values for properties only needed for
            # model fitting.
            if bandIndex is None:
                sed = componentData.sed
            else:
                sed = componentData.sed[bandIndex:bandIndex+1]
            sed = DummyParameter(sed)
            morph = DummyParameter(componentData.morph)
            # Note: since we aren't fitting a model, we don't need to
            # set the RMS of the background.
            # We set it to NaN just to be safe.
            component = LiteFactorizedComponent(
                sed=sed,
                morph=morph,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                model_bbox=modelBox,
                bg_rms=np.nan
            )
            components.append(component)

        source = LiteSource(components=components, dtype=dtype)
        source.recordId = sourceId
        source.peakId = sourceData.peakId
        sources.append(source)

    return LiteBlend(sources=sources, observation=None)
def scarletLiteToData(blend, psfCenter, xy0, bands):
    """Convert a scarlet lite blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.lite.LiteBlend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.
    bands : `tuple[str]`
        The bands that were deblended.
        This ignores bands that could not be deblended because the
        observed PSF could not be modeled.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        components = []
        factorized = []
        for component in source.components:
            # Strip the band axis and flip (y, x) to the lsst (x, y)
            # convention used by the persistence classes.
            origin = tuple(int(x) for x in component.bbox.origin[1:][::-1])
            shape = tuple(int(x) for x in component.bbox.shape[1:][::-1])
            center = tuple(int(x) for x in component.center[::-1])
            if isinstance(component, LiteFactorizedComponent):
                factorized.append(ScarletFactorizedComponentData(
                    xy0=origin,
                    extent=shape,
                    center=center,
                    sed=component.sed,
                    morph=component.morph,
                ))
            else:
                # Fixed: non-factorized components were previously appended
                # to the same list as factorized ones and stored under
                # `factorizedComponents`, which broke round-tripping through
                # `ScarletSourceData.fromDict` (it expects sed/morph keys).
                components.append(ScarletComponentData(
                    xy0=origin,
                    extent=shape,
                    center=center,
                    model=component.get_model(),
                ))
        sourceData = ScarletSourceData(
            components=components,
            factorizedComponents=factorized,
            peakId=source.peakId,
        )
        sources[source.recordId] = sourceData

    blendData = ScarletBlendData(
        # Coerce to builtin ints/tuples for JSON serializability,
        # consistent with `scarletToData`.
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(x) for x in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
        bands=bands,
    )

    return blendData
def scarletToData(blend, psfCenter, xy0, bands):
    """Convert a scarlet blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.Blend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.
    bands : `tuple[str]`
        The bands that were deblended.
        This ignores bands that could not be deblended because the
        observed PSF could not be modeled.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        # Strip the band axis and flip (y, x) to the lsst (x, y) convention
        # used by the persistence classes.
        origin = tuple(int(v) for v in source.bbox.origin[1:][::-1])
        shape = tuple(int(v) for v in source.bbox.shape[1:][::-1])
        center = tuple(int(v) for v in source.center[::-1])
        cube = ScarletComponentData(
            xy0=origin,
            extent=shape,
            center=center,
            model=source.get_model(),
        )
        # Scarlet main sources are persisted as a single data cube;
        # there are no factorized components.
        sources[source.recordId] = ScarletSourceData(
            components=[cube],
            factorizedComponents=[],
            peakId=source.peakId,
        )

    return ScarletBlendData(
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(v) for v in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
        bands=bands,
    )