Coverage for python/lsst/meas/extensions/scarlet/io.py: 22%
291 statements
« prev ^ index » next coverage.py v7.4.3, created at 2024-03-14 04:27 -0700
« prev ^ index » next coverage.py v7.4.3, created at 2024-03-14 04:27 -0700
1from __future__ import annotations
3from dataclasses import dataclass
4import json
5import logging
6import numpy as np
7from scarlet.bbox import Box, overlapped_slices
8from scarlet.lite import LiteBlend, LiteFactorizedComponent, LiteObservation, LiteSource, LiteParameter
9from scarlet.lite.measure import weight_sources
10import traceback
12from lsst.geom import Box2I, Extent2I, Point2I, Point2D
13from lsst.afw.image import computePsfImage
14from lsst.afw.detection import Footprint
16from .source import liteModelToHeavy
# Public symbols exported by ``from ... import *``.
__all__ = [
    "ScarletComponentData",
    "ScarletFactorizedComponentData",
    "ScarletSourceData",
    "ScarletBlendData",
    "ScarletModelData",
    "updateBlendRecords",
    "boundedDataToBox",
    "ComponentCube",
    "dataToScarlet",
    "scarletLiteToData",
    "scarletToData",
    "DummyObservation",
]

# Module-level logger, used when updating catalog records.
logger = logging.getLogger(__name__)
@dataclass
class ScarletComponentData:
    """Data for a component expressed as a 3D data cube

    For now this is used for scarlet main source models because
    their structure is too complex to persist in the same
    way that scarlet lite components can be persisted.

    Note that both `xy0` and `extent` use lsst ``(x, y)`` convention,
    not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `float`
        The center of the component.
    model : `numpy.ndarray`
        The model for the component as a 3D ``(band, y, x)`` array.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    model: np.ndarray

    def asDict(self) -> dict:
        """Return the object encoded into a dict for JSON serialization

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            # Bug fix: this previously stored `self.extent` under the
            # "center" key, so the center was lost on round-trip.
            "center": self.center,
            "model": tuple(self.model.flatten().astype(float)),
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletComponentData":
        """Reconstruct `ScarletComponentData` from JSON compatible dict

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletComponentData`
            The reconstructed object
        """
        dataShallowCopy = dict(data)
        dataShallowCopy["xy0"] = tuple(data["xy0"])
        dataShallowCopy["extent"] = tuple(data["extent"])
        # `extent` is (width, height); numpy arrays are (bands, height, width).
        shape = dataShallowCopy["extent"][::-1]
        # Bug fix: the number of bands is the number of flattened model
        # elements divided by the number of pixels per band, not
        # height * width as previously computed.
        numBands = len(data["model"]) // (shape[0] * shape[1])
        dataShallowCopy["model"] = np.array(
            data["model"]).reshape((numBands,) + shape).astype(np.float32)
        return cls(**dataShallowCopy)
@dataclass
class ScarletFactorizedComponentData:
    """Data for a factorized component

    Note that both `xy0` and `extent` are stored in the lsst
    ``(x, y)`` convention, not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    center : `tuple` of `int`
        The ``(x, y)`` center of the component.
        Note: once this is converted into a scarlet `LiteBlend` the source has
        the traditional c++ `(y, x)` ordering.
    sed : `numpy.ndarray`
        The SED of the component.
    morph : `numpy.ndarray`
        The 2D morphology of the component.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    center: tuple[float, float]
    sed: np.ndarray
    morph: np.ndarray

    def asDict(self) -> dict:
        """Encode this component as a JSON compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        result = {
            "xy0": self.xy0,
            "extent": self.extent,
            "center": self.center,
        }
        result["sed"] = tuple(self.sed.astype(float))
        result["morph"] = tuple(self.morph.flatten().astype(float))
        return result

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletFactorizedComponentData":
        """Reconstruct a `ScarletFactorizedComponentData` from a JSON
        compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletFactorizedComponentData`
            The reconstructed object
        """
        kwargs = dict(data)
        extent = tuple(data["extent"])
        kwargs["xy0"] = tuple(data["xy0"])
        kwargs["extent"] = extent
        kwargs["sed"] = np.asarray(data["sed"]).astype(np.float32)
        # The 2D morphology is (height, width), the reverse of `extent`.
        kwargs["morph"] = np.asarray(data["morph"]).reshape(extent[::-1]).astype(np.float32)
        return cls(**kwargs)
@dataclass
class ScarletSourceData:
    """Data for a scarlet source

    Attributes
    ----------
    components : `list` of `ScarletComponentData`
        The components contained in the source that are not factorized.
    factorizedComponents : `list` of `ScarletFactorizedComponentData`
        The components contained in the source that are factorized.
    peakId : `int`
        The peak ID of the source in it's parent's footprint peak catalog.
    """
    components: list[ScarletComponentData]
    factorizedComponents: list[ScarletFactorizedComponentData]
    peakId: int

    def asDict(self) -> dict:
        """Encode this source as a JSON compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        # Note: factorized components are stored under the shorter
        # "factorized" key in the persisted format.
        return {
            "components": [component.asDict() for component in self.components],
            "factorized": [component.asDict() for component in self.factorizedComponents],
            "peakId": self.peakId,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletSourceData":
        """Reconstruct a `ScarletSourceData` from a JSON compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletSourceData`
            The reconstructed object
        """
        kwargs = dict(data)
        # The persisted key is "factorized"; the attribute is
        # "factorizedComponents".
        del kwargs["factorized"]
        kwargs["components"] = [
            ScarletComponentData.fromDict(component)
            for component in data["components"]
        ]
        kwargs["factorizedComponents"] = [
            ScarletFactorizedComponentData.fromDict(component)
            for component in data["factorized"]
        ]
        kwargs["peakId"] = int(data["peakId"])
        return cls(**kwargs)
@dataclass
class ScarletBlendData:
    """Data for an entire blend.

    Note that `xy0`, `extent`, and `psfCenter` are stored in the lsst
    ``(x, y)`` convention, not the scarlet/C++ ``(y, x)`` convention.

    Attributes
    ----------
    xy0 : `tuple` of `int`
        The lower bound of the components bounding box.
    extent : `tuple` of `int`
        The `(width, height)` of the component array.
    sources : `dict` of `int`: `ScarletSourceData`
        Data for the sources contained in the blend.
    psfCenter : `tuple` of `int`
        The location used for the center of the PSF for
        the blend.
    bands : `list` of `str`
        The names of the bands.
        The order of the bands must be the same as the order of
        the multiband model arrays, and SEDs.
    """
    xy0: tuple[int, int]
    extent: tuple[int, int]
    sources: dict[int, ScarletSourceData]
    psfCenter: tuple[float, float]
    bands: tuple[str]

    def asDict(self) -> dict:
        """Encode this blend as a JSON compatible dict.

        Returns
        -------
        result : `dict`
            The object encoded as a JSON compatible dict
        """
        encodedSources = {
            sourceId: source.asDict()
            for sourceId, source in self.sources.items()
        }
        return {
            "xy0": self.xy0,
            "extent": self.extent,
            "psfCenter": self.psfCenter,
            "sources": encodedSources,
            "bands": self.bands,
        }

    @classmethod
    def fromDict(cls, data: dict) -> "ScarletBlendData":
        """Reconstruct a `ScarletBlendData` from a JSON compatible dict.

        Parameters
        ----------
        data : `dict`
            Dictionary representation of the object

        Returns
        -------
        result : `ScarletBlendData`
            The reconstructed object
        """
        kwargs = dict(data)
        # JSON stores tuples as lists; convert them back.
        for key in ("xy0", "extent", "psfCenter", "bands"):
            kwargs[key] = tuple(data[key])
        # JSON object keys are always strings; source IDs are ints.
        kwargs["sources"] = {
            int(sourceId): ScarletSourceData.fromDict(source)
            for sourceId, source in data["sources"].items()
        }
        return cls(**kwargs)
class ScarletModelData:
    """A container that propagates scarlet models for an entire `SourceCatalog`
    """
    def __init__(self, psf, blends=None):
        """Initialize an instance

        Parameters
        ----------
        psf : `numpy.ndarray`
            The 2D array of the PSF in scarlet model space.
            This is typically a narrow Gaussian integrated over the
            pixels in the exposure.
        blends : `dict` of [`int`: `ScarletBlendData`]
            Initial `dict` that maps parent IDs from the source catalog
            to the scarlet model data for the parent blend.
        """
        self.psf = psf
        if blends is None:
            blends = {}
        self.blends = blends

    def json(self) -> str:
        """Serialize the data model to a JSON formatted string

        Returns
        -------
        result : `str`
            The result of the object converted into a JSON format
        """
        result = {
            "psfShape": self.psf.shape,
            # Bug fix: `list(self.psf.flatten())` produced numpy scalars
            # (e.g. np.float32), which `json.dumps` cannot serialize.
            # Convert to native Python floats, matching the `.astype(float)`
            # conversions used by the `asDict` methods in this module.
            "psf": self.psf.flatten().astype(float).tolist(),
            "blends": {id: blend.asDict() for id, blend in self.blends.items()}
        }
        return json.dumps(result)

    @classmethod
    def parse_obj(cls, data: dict) -> "ScarletModelData":
        """Construct a ScarletModelData from python decoded JSON object.

        Parameters
        ----------
        data : `Mapping`
            The result of json.load(s) on a JSON persisted ScarletModelData

        Returns
        -------
        result : `ScarletModelData`
            The `ScarletModelData` that was loaded the from the input object
        """
        dataShallowCopy = dict(data)
        # Restore the PSF array from its flattened form.
        modelPsf = np.array(
            dataShallowCopy["psf"]).reshape(dataShallowCopy.pop("psfShape")).astype(np.float32)
        dataShallowCopy["psf"] = modelPsf
        # JSON object keys are strings; blend IDs are ints.
        dataShallowCopy["blends"] = {
            int(id): ScarletBlendData.fromDict(blend)
            for id, blend in data['blends'].items()
        }
        return cls(**dataShallowCopy)

    def updateCatalogFootprints(self, catalog, band, psfModel, maskImage=None, redistributeImage=None,
                                removeScarletData=True, updateFluxColumns=True):
        """Use the scarlet models to set HeavyFootprints for modeled sources

        Parameters
        ----------
        catalog : `lsst.afw.table.SourceCatalog`
            The catalog missing heavy footprints for deblended sources.
        band : `str`
            The name of the band that the catalog data describes.
        psfModel : `lsst.afw.detection.Psf`
            The observed PSF model for the catalog.
        maskImage : `lsst.afw.image.MaskX`
            The masked image used to calculate the fraction of pixels
            in each footprint with valid data.
            This is only used when `updateFluxColumns` is `True`,
            and is required if it is.
        redistributeImage : `lsst.afw.image.Image`
            The image that is the source for flux re-distribution.
            If `redistributeImage` is `None` then flux re-distribution is
            not performed.
        removeScarletData : `bool`
            Whether or not to remove `ScarletBlendData` for each blend
            in order to save memory.
        updateFluxColumns : `bool`
            Whether or not to update the `deblend_*` columns in the catalog.
            This should only be true when the input catalog schema already
            contains those columns.
        """
        # Iterate over the blends, since flux re-distribution must be done on
        # all of the children with the same parent
        parents = catalog[catalog["parent"] == 0]
        for parentRecord in parents:
            parentId = parentRecord.getId()
            try:
                blendModel = self.blends[parentId]
            except KeyError:
                # The parent was skipped in the deblender, so there are
                # no models for its sources.
                continue
            parent = catalog.find(parentId)
            if updateFluxColumns and redistributeImage is not None:
                # Update the data coverage
                # (1 - # of NO_DATA pixels/# of pixels)
                parentRecord["deblend_dataCoverage"] = calculateFootprintCoverage(
                    parent.getFootprint(),
                    maskImage
                )
            if band not in blendModel.bands:
                parent = catalog.find(parentId)
                peaks = parent.getFootprint().peaks
                # Set the footprint and coverage of the sources in this blend
                # to zero
                if updateFluxColumns:
                    parentRecord["deblend_dataCoverage"] = 0
                for sourceId, sourceData in blendModel.sources.items():
                    sourceRecord = catalog.find(sourceId)
                    footprint = Footprint()
                    # Keep only the peak matching this source so the empty
                    # footprint still records the source position.
                    peakIdx = np.where(peaks["id"] == sourceData.peakId)[0][0]
                    peak = peaks[peakIdx]
                    footprint.addPeak(peak.getIx(), peak.getIy(), peak.getPeakValue())
                    sourceRecord.setFootprint(footprint)
                    if updateFluxColumns:
                        sourceRecord["deblend_dataCoverage"] = 0
                continue
            # Get the index of the model for the given band
            bandIndex = blendModel.bands.index(band)
            updateBlendRecords(
                blendData=blendModel,
                catalog=catalog,
                modelPsf=self.psf,
                observedPsf=psfModel,
                maskImage=maskImage,
                redistributeImage=redistributeImage,
                bandIndex=bandIndex,
                parentFootprint=parentRecord.getFootprint(),
                updateFluxColumns=updateFluxColumns,
            )
            # Save memory by removing the data for the blend
            if removeScarletData:
                del self.blends[parentId]
def calculateFootprintCoverage(footprint, maskImage):
    """Calculate the fraction of pixels with no data in a Footprint

    Parameters
    ----------
    footprint : `lsst.afw.detection.Footprint`
        The footprint to check for missing data.
    maskImage : `lsst.afw.image.MaskX`
        The mask image with the ``NO_DATA`` bit set.

    Returns
    -------
    coverage : `float`
        The fraction of pixels in `footprint` where the ``NO_DATA`` bit is set.
    """
    # Bit value of the "NO_DATA" mask plane.
    noDataBit = 2**maskImage.getMaskPlaneDict()["NO_DATA"]
    # Restrict the mask to the footprint's bounding box and count the
    # NO_DATA pixels that fall inside the footprint's spans.
    bbox = footprint.getBBox()
    spans = footprint.spans.asArray()
    flagged = (maskImage[bbox].array & noDataBit) * spans > 0
    return 1 - np.sum(flagged) / footprint.getArea()
def updateBlendRecords(blendData, catalog, modelPsf, observedPsf, maskImage, redistributeImage, bandIndex,
                       parentFootprint, updateFluxColumns):
    """Create footprints and update band-dependent columns in the catalog

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    catalog : `lsst.afw.table.SourceCatalog`
        The catalog that is being updated.
    modelPsf : `numpy.ndarray`
        The 2D model of the PSF.
    observedPsf : `lsst.afw.detection.Psf`
        The observed PSF model for the catalog.
    maskImage : `lsst.afw.image.MaskX`
        The masked image used to calculate the fraction of pixels
        in each footprint with valid data.
        This is only used when `updateFluxColumns` is `True`,
        and is required if it is.
    redistributeImage : `lsst.afw.image.Image`
        The image that is the source for flux re-distribution.
        If `redistributeImage` is `None` then flux re-distribution is
        not performed.
    bandIndex : `int`
        The number of the band to extract.
    parentFootprint : `lsst.afw.Footprint`
        The footprint of the parent, used for masking out the model
        when re-distributing flux.
    updateFluxColumns : `bool`
        Whether or not to update the `deblend_*` columns in the catalog.
        This should only be true when the input catalog schema already
        contains those columns.
    """
    # We import here to avoid a circular dependency
    from .scarletDeblendTask import setDeblenderMetrics

    useFlux = redistributeImage is not None
    xy0 = Point2I(*blendData.xy0)
    # Build a single-band scarlet blend from the persisted data.
    blend = dataToScarlet(
        blendData=blendData,
        bandIndex=bandIndex,
        dtype=np.float32,
    )
    # Attach a dummy observation so the model can be rendered in the
    # observed seeing without any image data.
    position = Point2D(*blendData.psfCenter)
    psfs = observedPsf.computeKernelImage(position).array[None, :, :]
    modelBox = Box((1,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    blend.observation = DummyObservation(
        psfs=psfs,
        model_psf=modelPsf[None, :, :],
        bbox=modelBox,
        dtype=np.float32,
    )

    # Set the metrics for the blend.
    # TODO: remove this once DM-34558 runs all deblender metrics
    # in a separate task.
    if updateFluxColumns:
        setDeblenderMetrics(blend)

    # Update the source models if the scarlet models are used as
    # templates to re-distribute flux from an observation
    if useFlux:
        # Extract the image array to re-distribute its flux
        extent = Extent2I(*blendData.extent)
        bbox = Box2I(xy0, extent)
        blend.observation.images = redistributeImage[bbox].array[None, :, :]
        blend.observation.weights = parentFootprint.spans.asArray()[None, :, :]
        # Re-distribute the flux for each source in-place
        weight_sources(blend)

    # Update the HeavyFootprints for deblended sources
    # and update the band-dependent catalog columns.
    for source in blend.sources:
        sourceRecord = catalog.find(source.recordId)
        parent = catalog.find(sourceRecord["parent"])
        peaks = parent.getFootprint().peaks
        peakIdx = np.where(peaks["id"] == source.peakId)[0][0]
        source.detectedPeak = peaks[peakIdx]
        # Set the Footprint
        heavy = liteModelToHeavy(
            source=source,
            blend=blend,
            xy0=xy0,
            useFlux=useFlux,
        )
        sourceRecord.setFootprint(heavy)

        if updateFluxColumns:
            # Set the fraction of pixels with valid data.
            coverage = calculateFootprintCoverage(heavy, maskImage)
            sourceRecord.set("deblend_dataCoverage", coverage)

            # Set the flux of the scarlet model
            # TODO: this field should probably be deprecated,
            # since DM-33710 gives users access to the scarlet models.
            model = source.get_model()[0]
            sourceRecord.set("deblend_scarletFlux", np.sum(model))

            # Set the flux at the center of the model
            peak = heavy.peaks[0]

            img = heavy.extractImage(fill=0.0)
            try:
                sourceRecord.set("deblend_peak_instFlux", img[Point2I(peak["i_x"], peak["i_y"])])
            except Exception:
                srcId = sourceRecord.getId()
                x = peak["i_x"]
                y = peak["i_y"]
                # Bug fix: `Logger.warn` is a deprecated alias of `warning`;
                # also use lazy %-style arguments instead of an f-string.
                logger.warning(
                    "Source %s at %s,%s could not set the peak flux with error:", srcId, x, y)
                traceback.print_exc()
                sourceRecord.set("deblend_peak_instFlux", np.nan)

            # Set the metrics columns.
            # TODO: remove this once DM-34558 runs all deblender metrics
            # in a separate task.
            sourceRecord.set("deblend_maxOverlap", source.metrics.maxOverlap[0])
            sourceRecord.set("deblend_fluxOverlap", source.metrics.fluxOverlap[0])
            sourceRecord.set("deblend_fluxOverlapFraction", source.metrics.fluxOverlapFraction[0])
            sourceRecord.set("deblend_blendedness", source.metrics.blendedness[0])
def boundedDataToBox(nBands, boundedData):
    """Convert bounds from the data storage format to a `scarlet.bbox.Box`

    Parameters
    ----------
    nBands : `int`
        The number of bands in the model.
    boundedData :
        The scarlet data object containing `xy0` and `extent`
        attributes giving bounding box information in the lsst format
        `(x, y)`.

    Returns
    -------
    bbox : `scarlet.bbox.Box`
        The scarlet bounding box generated by the bounds.
    """
    # Reverse to scarlet's (y, x) convention and prepend the band axis.
    origin = (0, ) + boundedData.xy0[::-1]
    shape = (nBands, ) + boundedData.extent[::-1]
    return Box(shape=shape, origin=origin)
class ComponentCube:
    """Dummy component for scarlet main sources.

    This is duck-typed to a `scarlet.lite.LiteComponent` in order to
    generate a model from the component.

    If scarlet lite ever implements a component as a data cube,
    this class can be removed.
    """
    def __init__(self, model, center, bbox, model_bbox=None):
        """Initialization

        Parameters
        ----------
        model : `numpy.ndarray`
            The 3D (bands, y, x) model of the component.
        center : `tuple` of `int`
            The `(y, x)` center of the component.
        bbox : `scarlet.bbox.Box`
            The bounding box of the component.
        model_bbox : `scarlet.bbox.Box`, optional
            The bounding box of the entire blend.
            Accepted for API compatibility but not used.
            Bug fix: this parameter previously had no default and was
            never stored, yet the only caller in this module omitted it,
            raising a `TypeError`.
        """
        self.model = model
        self.center = center
        self.bbox = bbox

    def get_model(self, bbox=None):
        """Generate the model for the source

        Parameters
        ----------
        bbox : `scarlet.bbox.Box`
            The bounding box to insert the model into.
            If `bbox` is `None` then the model is returned in its own
            bounding box.

        Returns
        -------
        model : `numpy.ndarray`
            The model as a 3D `(band, y, x)` array.
        """
        model = self.model
        if bbox is not None:
            # Project the component model into the requested box,
            # zero-filling pixels outside the component's own box.
            slices = overlapped_slices(bbox, self.bbox)
            _model = np.zeros(bbox.shape, model.dtype)
            _model[slices[0]] = model[slices[1]]
            model = _model
        return model
class DummyParameter(LiteParameter):
    """A parameter place holder

    Models in scarlet have parameters, not arrays,
    for their sed's and morphologies, so this wrapper for
    the SED and morphology arrays implements the required
    methods and attributes.
    """
    def __init__(self, x):
        # The wrapped array (SED or morphology), exposed as `.x`.
        self.x = x
        # No gradient is tracked: persisted models are never re-fit.
        self.grad = None

    def update(self, it, input_grad, *args):
        # No-op: the parameter is frozen when reconstructing stored models.
        pass

    def grow(self, new_shape, dist):
        # No-op: the bounding box is fixed for stored models.
        pass

    def shrink(self, dist):
        # No-op: the bounding box is fixed for stored models.
        pass
class DummyObservation(LiteObservation):
    """An observation that does not have any image data

    In order to reproduce a model in an observed seeing we make use of the
    scarlet `LiteObservation` class, but since we are not fitting the model
    to data we can use empty arrays for the image, variance, and weight data,
    and zero for the `noise_rms`.

    Parameters
    ----------
    psfs : `numpy.ndarray`
        The array of PSF images in each band
    model_psf : `numpy.ndarray`
        The image of the model PSF.
    bbox : `scarlet.bbox.Box`
        The bounding box of the observation.
    dtype : `numpy.dtype`
        The data type of the model that is generated.
    """
    def __init__(self, psfs, model_psf, bbox, dtype):
        # A 0-d placeholder array stands in for the image, variance,
        # and weights, since no fitting is performed.
        dummyImage = np.zeros([], dtype=dtype)

        super().__init__(
            images=dummyImage,
            variance=dummyImage,
            weights=dummyImage,
            psfs=psfs,
            model_psf=model_psf,
            convolution_mode="real",
            noise_rms=0,
            bbox=bbox,
        )
def multibandDataToScarlet(
    modelData,
    blendId,
    observedPsfs=None,
    dtype=np.float32,
    mExposure=None,
    footprint=None,
):
    """Convert the stored data model into a scarlet lite blend,
    including observation information.

    While the typical use case in the science pipelines is to attach
    scarlet models as footprints to a `SourceCatalog`, it can be advantageous
    to load an entire multi-band blend. This requires (at a minimum) the
    PSF in each observed band for the final model.

    Parameters
    ----------
    modelData : `ScarletModelData`
        The model for all of the blends in a given tract/patch.
    blendId : `int`
        The source record ID of the parent record in the catalog.
    observedPsfs : `list` of `lsst.detection.Psf`
        The PSF for each observed image.
        Typically this is obtained using
        ```butler.get("deep_Coadd_calexp.psf", **dataId)``` for a given
        (tract, patch, band).
        If `mExposure` is not `None` then the observed PSFs are generated
        automatically, otherwise this parameter is required.
    dtype : `numpy.dtype`
        Datatype for the rendered model. If `mExposure` is not `None` then
        this parameter is ignored and the `dtype` of the image is used.
    mExposure : `lsst.afw.image.MultibandExposure`
        The observed exposure in each band.
        This is not required in order to render the models into numpy arrays,
        however it is required if the user plans to perform a warm restart
        using the stored models.
    footprint : `lsst.afw.detection.Footprint`
        The footprint of the parent blend.
        This is only required if the user desires to perform a warm restart
        and wants to mask out the pixels outside of the parent footprint
        similar to when scarlet was executed in the science pipelines.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        The full scarlet model for the blend.
    """
    # Import here to prevent circular import
    from .scarletDeblendTask import buildLiteObservation

    # Extract the blend data
    blendData = modelData.blends[blendId]
    nBands = len(blendData.bands)
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    # Bug fix: `dataToScarlet` has no `nBands` parameter, so the previous
    # call `dataToScarlet(blendData, nBands=nBands)` raised a TypeError.
    # Leaving `bandIndex=None` (the default) extracts the full
    # multi-band model, with the band count derived from `blendData.bands`.
    blend = dataToScarlet(blendData, dtype=dtype)

    if mExposure is None:
        psfModels = computePsfImage(observedPsfs, blendData.psfCenter, blendData.bands)
        blend.observation = DummyObservation(
            psfs=psfModels,
            model_psf=modelData.psf[None, :, :],
            bbox=modelBox,
            dtype=dtype,
        )
    else:
        blend.observation = buildLiteObservation(
            modelPsf=modelData.psf,
            psfCenter=blendData.psfCenter,
            mExposure=mExposure,
            footprint=footprint,
        )
    return blend
def dataToScarlet(blendData, bandIndex=None, dtype=np.float32):
    """Convert the storage data model into a scarlet lite blend

    Parameters
    ----------
    blendData : `ScarletBlendData`
        Persistable data for the entire blend.
    bandIndex : `int`
        Index of model to extract. If `bandIndex` is `None` then the
        full model is extracted.
    dtype : `numpy.dtype`
        The data type of the model that is generated.

    Returns
    -------
    blend : `scarlet.lite.LiteBlend`
        A scarlet blend model extracted from persisted data.
    """
    if bandIndex is not None:
        nBands = 1
    else:
        nBands = len(blendData.bands)
    modelBox = Box((nBands,) + tuple(blendData.extent[::-1]), origin=(0, 0, 0))
    sources = []
    for sourceId, sourceData in blendData.sources.items():
        components = []
        for componentData in sourceData.components:
            bbox = boundedDataToBox(nBands, componentData)
            if bandIndex is None:
                model = componentData.model
            else:
                model = componentData.model[bandIndex][None, :, :]
            # Bug fix: `ComponentCube.__init__` requires a `model_bbox`
            # argument, which was previously omitted here and raised a
            # TypeError whenever a blend contained data-cube components.
            component = ComponentCube(
                model=model,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                model_bbox=modelBox,
            )
            components.append(component)
        for componentData in sourceData.factorizedComponents:
            bbox = boundedDataToBox(nBands, componentData)
            # Add dummy values for properties only needed for
            # model fitting.
            if bandIndex is None:
                sed = componentData.sed
            else:
                sed = componentData.sed[bandIndex:bandIndex+1]
            sed = DummyParameter(sed)
            morph = DummyParameter(componentData.morph)
            # Note: since we aren't fitting a model, we don't need to
            # set the RMS of the background.
            # We set it to NaN just to be safe.
            component = LiteFactorizedComponent(
                sed=sed,
                morph=morph,
                center=tuple(componentData.center[::-1]),
                bbox=bbox,
                model_bbox=modelBox,
                bg_rms=np.nan
            )
            components.append(component)

        source = LiteSource(components=components, dtype=dtype)
        source.recordId = sourceId
        source.peakId = sourceData.peakId
        sources.append(source)

    return LiteBlend(sources=sources, observation=None)
def scarletLiteToData(blend, psfCenter, xy0, bands):
    """Convert a scarlet lite blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.lite.LiteBlend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.
    bands : `tuple[str]`
        The bands that were deblended.
        This ignores bands that could not be deblended because the
        observed PSF could not be modeled.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for source in blend.sources:
        # Bug fix: previously all components, factorized or not, were
        # appended to a single list that was stored as
        # `factorizedComponents` (with `components=[]`), so data-cube
        # components were persisted with the wrong type. Keep the two
        # component kinds in separate lists, as `scarletToData` does.
        componentData = []
        factorizedData = []
        for component in source.components:
            if isinstance(component, LiteFactorizedComponent):
                factorizedData.append(ScarletFactorizedComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    sed=component.sed,
                    morph=component.morph,
                ))
            else:
                componentData.append(ScarletComponentData(
                    xy0=tuple(int(x) for x in component.bbox.origin[1:][::-1]),
                    extent=tuple(int(x) for x in component.bbox.shape[1:][::-1]),
                    center=tuple(int(x) for x in component.center[::-1]),
                    model=component.get_model(),
                ))
        sources[source.recordId] = ScarletSourceData(
            components=componentData,
            factorizedComponents=factorizedData,
            peakId=source.peakId,
        )

    blendData = ScarletBlendData(
        xy0=(xy0.x, xy0.y),
        extent=blend.observation.bbox.shape[1:][::-1],
        sources=sources,
        psfCenter=psfCenter,
        bands=bands,
    )

    return blendData
def scarletToData(blend, psfCenter, xy0, bands):
    """Convert a scarlet blend into a persistable data object

    Parameters
    ----------
    blend : `scarlet.Blend`
        The blend that is being persisted.
    psfCenter : `tuple` of `int`
        The center of the PSF.
    xy0 : `tuple` of `int`
        The lower coordinate of the entire blend.
    bands : `tuple[str]`
        The bands that were deblended.
        This ignores bands that could not be deblended because the
        observed PSF could not be modeled.

    Returns
    -------
    blendData : `ScarletBlendDataModel`
        The data model for a single blend.
    """
    sources = {}
    for src in blend.sources:
        # Scarlet main sources are stored as a single data-cube component,
        # with bounds converted from (y, x) to the lsst (x, y) convention.
        cubeData = ScarletComponentData(
            xy0=tuple(int(v) for v in src.bbox.origin[1:][::-1]),
            extent=tuple(int(v) for v in src.bbox.shape[1:][::-1]),
            center=tuple(int(v) for v in src.center[::-1]),
            model=src.get_model(),
        )
        sources[src.recordId] = ScarletSourceData(
            components=[cubeData],
            factorizedComponents=[],
            peakId=src.peakId,
        )

    return ScarletBlendData(
        xy0=(int(xy0.x), int(xy0.y)),
        extent=tuple(int(v) for v in blend.observation.bbox.shape[1:][::-1]),
        sources=sources,
        psfCenter=psfCenter,
        bands=bands,
    )