Coverage for python/lsst/pipe/tasks/healSparseMapping.py: 17% of 391 statements
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig",
23 "HealSparseMapFormatter", "HealSparsePropertyMapConnections",
24 "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask",
25 "ConsolidateHealSparsePropertyMapConnections",
26 "ConsolidateHealSparsePropertyMapConfig",
27 "ConsolidateHealSparsePropertyMapTask"]
29from collections import defaultdict
30import warnings
31import numbers
32import numpy as np
33import hpgeom as hpg
34import healsparse as hsp
36import lsst.pex.config as pexConfig
37import lsst.pipe.base as pipeBase
38import lsst.geom
39import lsst.afw.geom as afwGeom
40from lsst.daf.butler import Formatter
41from lsst.skymap import BaseSkyMap
42from lsst.utils.timer import timeMethod
43from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig,
44 PropertyMapMap, compute_approx_psf_size_and_shape)
47class HealSparseMapFormatter(Formatter):
48 """Interface for reading and writing healsparse.HealSparseMap files."""
49 unsupportedParameters = frozenset()
50 supportedExtensions = frozenset({".hsp", ".fit", ".fits"})
51 extension = '.hsp'
53 def read(self, component=None):
54 # Docstring inherited from Formatter.read.
55 path = self.fileDescriptor.location.path
57 if component == 'coverage':
58 try:
59 data = hsp.HealSparseCoverage.read(path)
60 except (OSError, RuntimeError):
61 raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")
63 return data
65 if self.fileDescriptor.parameters is None:
66 pixels = None
67 degrade_nside = None
68 else:
69 pixels = self.fileDescriptor.parameters.get('pixels', None)
70 degrade_nside = self.fileDescriptor.parameters.get('degrade_nside', None)
71 try:
72 data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
73 except (OSError, RuntimeError):
74 raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")
76 return data
78 def write(self, inMemoryDataset):
79 # Docstring inherited from Formatter.write.
80 # Update the location with the formatter-preferred file extension
81 self.fileDescriptor.location.updateExtension(self.extension)
82 inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True)
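# A hedged aside (not part of the original module): the butler normally drives
# HealSparseMapFormatter, but the same read parameters can be exercised
# directly with healsparse, e.g.
#
#     import healsparse as hsp
#     cov = hsp.HealSparseCoverage.read("map.hsp")        # 'coverage' component
#     sub = hsp.HealSparseMap.read("map.hsp",
#                                  pixels=[100, 101],     # partial read
#                                  degrade_nside=4096)    # read at lower resolution
#
# "map.hsp" and the parameter values above are placeholders for illustration.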
85def _is_power_of_two(value):
86 """Check that value is a power of two.
88 Parameters
89 ----------
90 value : `int`
91 Value to check.
93 Returns
94 -------
95 is_power_of_two : `bool`
96 True if value is a power of two; False otherwise, or
97 if value is not an integer.
98 """
99 if not isinstance(value, numbers.Integral):
100 return False
102 # See https://stackoverflow.com/questions/57025836
103 # Every power of 2 has exactly 1 bit set to 1; subtracting
104 # 1 flips that bit and sets every lower bit. ANDing the result
105 # with the original value must therefore give 0.
106 return (value & (value - 1) == 0) and value != 0
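# Illustrative behaviour of the check above (a sketch, not original code):
#
#     _is_power_of_two(1)      # True:  0b1 & 0b0 == 0
#     _is_power_of_two(1024)   # True:  0b10000000000 & 0b01111111111 == 0
#     _is_power_of_two(48)     # False: 0b110000 & 0b101111 == 0b100000
#     _is_power_of_two(0)      # False: explicitly excluded
#     _is_power_of_two(2.0)    # False: not an integral type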
109class HealSparseInputMapConfig(pexConfig.Config):
110 """Configuration parameters for HealSparseInputMapTask"""
111 nside = pexConfig.Field(
112 doc="Mapping healpix nside. Must be power of 2.",
113 dtype=int,
114 default=32768,
115 check=_is_power_of_two,
116 )
117 nside_coverage = pexConfig.Field(
118 doc="HealSparse coverage map nside. Must be power of 2.",
119 dtype=int,
120 default=256,
121 check=_is_power_of_two,
122 )
123 bad_mask_min_coverage = pexConfig.Field(
124 doc=("Minimum area fraction of a map healpix pixel that must be "
125 "covered by bad pixels for the pixel to be removed from the input map. "
126 "This is approximate."),
127 dtype=float,
128 default=0.5,
129 )
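# Rough scale of the defaults above (a hedged aside; numbers are approximate):
#
#     import hpgeom as hpg
#     hpg.nside_to_pixel_area(32768, degrees=True) * 3600.0**2
#     # ~41.5 arcsec^2 per map pixel, i.e. roughly 6.4 arcsec on a side
#     hpg.nside_to_pixel_area(256, degrees=True)
#     # ~0.05 deg^2 per coverage pixel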
132class HealSparseInputMapTask(pipeBase.Task):
133 """Task for making a HealSparse input map."""
135 ConfigClass = HealSparseInputMapConfig
136 _DefaultName = "healSparseInputMap"
138 def __init__(self, **kwargs):
139 pipeBase.Task.__init__(self, **kwargs)
141 self.ccd_input_map = None
143 def build_ccd_input_map(self, bbox, wcs, ccds):
144 """Build a map from ccd valid polygons or bounding boxes.
146 Parameters
147 ----------
148 bbox : `lsst.geom.Box2I`
149 Bounding box for region to build input map.
150 wcs : `lsst.afw.geom.SkyWcs`
151 WCS object for region to build input map.
152 ccds : `lsst.afw.table.ExposureCatalog`
153 Exposure catalog with ccd data from coadd inputs.
154 """
155 with warnings.catch_warnings():
156 # Healsparse will emit a warning if nside coverage is greater than
157 # 128. In the case of generating patch input maps, and not global
158 # maps, high nside coverage works fine, so we can suppress this
159 # warning.
160 warnings.simplefilter("ignore")
161 self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
162 nside_sparse=self.config.nside,
163 dtype=hsp.WIDE_MASK,
164 wide_mask_maxbits=len(ccds))
165 self._wcs = wcs
166 self._bbox = bbox
167 self._ccds = ccds
169 pixel_scale = wcs.getPixelScale().asArcseconds()
170 hpix_area_arcsec2 = hpg.nside_to_pixel_area(self.config.nside, degrees=True)*(3600.**2.)
171 self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)
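# Illustrative numbers (assumed, not from the original source): with a
# 0.2 arcsec/pixel camera and the default config, hpix_area_arcsec2 is
# roughly 41.5 and _min_bad is roughly 0.5 * 41.5 / 0.2**2 ~ 520 bad
# image pixels per healpix map pixel.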
173 metadata = {}
174 self._bits_per_visit_ccd = {}
175 self._bits_per_visit = defaultdict(list)
176 for bit, ccd_row in enumerate(ccds):
177 metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
178 metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
179 metadata[f"B{bit:04d}WT"] = ccd_row["weight"]
181 self._bits_per_visit_ccd[(ccd_row["visit"], ccd_row["ccd"])] = bit
182 self._bits_per_visit[ccd_row["visit"]].append(bit)
184 ccd_poly = ccd_row.getValidPolygon()
185 if ccd_poly is None:
186 ccd_poly = afwGeom.Polygon(lsst.geom.Box2D(ccd_row.getBBox()))
187 # Detectors need to be rendered with their own wcs.
188 ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())
190 # Create a ccd healsparse polygon
191 poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
192 dec=ccd_poly_radec[: -1, 1],
193 value=[bit])
194 self.ccd_input_map.set_bits_pix(poly.get_pixels(nside=self.ccd_input_map.nside_sparse),
195 [bit])
197 # Cut down to the overall bounding box with associated wcs.
198 bbox_afw_poly = afwGeom.Polygon(lsst.geom.Box2D(bbox))
199 bbox_poly_radec = self._pixels_to_radec(self._wcs,
200 bbox_afw_poly.convexHull().getVertices())
201 bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
202 value=np.arange(self.ccd_input_map.wide_mask_maxbits))
203 bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)
204 self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map])
205 self.ccd_input_map.metadata = metadata
207 # Create a temporary map to hold the count of bad pixels in each healpix pixel
208 self._ccd_input_pixels = self.ccd_input_map.valid_pixels
210 dtype = [(f"v{visit}", np.int64) for visit in self._bits_per_visit.keys()]
212 with warnings.catch_warnings():
213 # Healsparse will emit a warning if nside coverage is greater than
214 # 128. In the case of generating patch input maps, and not global
215 # maps, high nside coverage works fine, so we can suppress this
216 # warning.
217 warnings.simplefilter("ignore")
218 self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty(
219 nside_coverage=self.config.nside_coverage,
220 nside_sparse=self.config.nside,
221 dtype=dtype,
222 primary=dtype[0][0])
224 # Don't set input bad map if there are no ccds which overlap the bbox.
225 if len(self._ccd_input_pixels) > 0:
226 self._ccd_input_bad_count_map[self._ccd_input_pixels] = np.zeros(1, dtype=dtype)
228 def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value):
229 """Mask a subregion from a visit.
230 This must be run after build_ccd_input_map initializes
231 the overall map.
233 Parameters
234 ----------
235 bbox : `lsst.geom.Box2I`
236 Bounding box from region to mask.
237 visit : `int`
238 Visit number corresponding to warp with mask.
239 mask : `lsst.afw.image.MaskX`
240 Mask plane from warp exposure.
241 bit_mask_value : `int`
242 Bit mask to check for bad pixels.
244 Raises
245 ------
246 RuntimeError : Raised if build_ccd_input_map was not run first.
247 """
248 if self.ccd_input_map is None:
249 raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox")
251 # Find the bad pixels and convert to healpix
252 bad_pixels = np.where(mask.array & bit_mask_value)
253 if len(bad_pixels[0]) == 0:
254 # No bad pixels
255 return
257 # Bad pixels come from warps which use the overall wcs.
258 bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
259 bad_pixels[0].astype(np.float64),
260 degrees=True)
261 bad_hpix = hpg.angle_to_pixel(self.config.nside, bad_ra, bad_dec)
263 # Count the number of bad image pixels in each healpix pixel
264 min_bad_hpix = bad_hpix.min()
265 bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
266 np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)
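# Illustrative trace with assumed values: if bad_hpix = [10, 12, 10, 15]
# then min_bad_hpix = 10, the count array has length 6, and np.add.at
# yields bad_hpix_count = [2, 0, 1, 0, 0, 1].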
268 # Add these to the accumulator map.
269 # We need to make sure that the "primary" array has valid values for
270 # this pixel to be registered in the accumulator map.
271 pix_to_add, = np.where(bad_hpix_count > 0)
272 count_map_arr = self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add]
273 primary = self._ccd_input_bad_count_map.primary
274 count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)
276 count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
277 count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]
279 self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add] = count_map_arr
281 def finalize_ccd_input_map_mask(self):
282 """Use accumulated mask information to finalize the masking of
283 ccd_input_map.
285 Raises
286 ------
287 RuntimeError : Raised if build_ccd_input_map was not run first.
288 """
289 if self.ccd_input_map is None:
290 raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.")
292 count_map_arr = self._ccd_input_bad_count_map[self._ccd_input_pixels]
293 for visit in self._bits_per_visit:
294 to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad)
295 if to_mask.size == 0:
296 continue
297 self.ccd_input_map.clear_bits_pix(self._ccd_input_pixels[to_mask],
298 self._bits_per_visit[visit])
300 # Clear memory
301 self._ccd_input_bad_count_map = None
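# Intended call sequence for this task (a hedged sketch; the variable names
# below are assumptions for illustration, not part of this module):
#
#     task = HealSparseInputMapTask()
#     task.build_ccd_input_map(patch_bbox, patch_wcs, coadd_inputs.ccds)
#     for visit, warp_mask in warp_masks.items():   # hypothetical container
#         task.mask_warp_bbox(patch_bbox, visit, warp_mask, bad_bit_mask)
#     task.finalize_ccd_input_map_mask()
#     input_map = task.ccd_input_map                # wide-mask HealSparseMap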
303 def _pixels_to_radec(self, wcs, pixels):
304 """Convert pixels to ra/dec positions using a wcs.
306 Parameters
307 ----------
308 wcs : `lsst.afw.geom.SkyWcs`
309 WCS object.
310 pixels : `list` [`lsst.geom.Point2D`]
311 List of pixels to convert.
313 Returns
314 -------
315 radec : `numpy.ndarray`
316 Nx2 array of ra/dec positions associated with pixels.
317 """
318 sph_pts = wcs.pixelToSky(pixels)
319 return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
320 for sph in sph_pts])
323class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
324 dimensions=("tract", "band", "skymap",),
325 defaultTemplates={"coaddName": "deep",
326 "calexpType": ""}):
327 input_maps = pipeBase.connectionTypes.Input(
328 doc="Healsparse bit-wise coadd input maps",
329 name="{coaddName}Coadd_inputMap",
330 storageClass="HealSparseMap",
331 dimensions=("tract", "patch", "skymap", "band"),
332 multiple=True,
333 deferLoad=True,
334 )
335 coadd_exposures = pipeBase.connectionTypes.Input(
336 doc="Coadded exposures associated with input_maps",
337 name="{coaddName}Coadd",
338 storageClass="ExposureF",
339 dimensions=("tract", "patch", "skymap", "band"),
340 multiple=True,
341 deferLoad=True,
342 )
343 visit_summaries = pipeBase.connectionTypes.Input(
344 doc="Visit summary tables with aggregated statistics",
345 name="finalVisitSummary",
346 storageClass="ExposureCatalog",
347 dimensions=("instrument", "visit"),
348 multiple=True,
349 deferLoad=True,
350 )
351 sky_map = pipeBase.connectionTypes.Input(
352 doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
353 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
354 storageClass="SkyMap",
355 dimensions=("skymap",),
356 )
358 # Create output connections for all possible maps defined in the
359 # registry. The vars() trick used here allows us to set class attributes
360 # programmatically. Taken from
361 # https://stackoverflow.com/questions/2519807/
362 # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
363 for name in BasePropertyMap.registry:
364 vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
365 doc=f"Minimum-value map of {name}",
366 name=f"{{coaddName}}Coadd_{name}_map_min",
367 storageClass="HealSparseMap",
368 dimensions=("tract", "skymap", "band"),
369 )
370 vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
371 doc=f"Maximum-value map of {name}",
372 name=f"{{coaddName}}Coadd_{name}_map_max",
373 storageClass="HealSparseMap",
374 dimensions=("tract", "skymap", "band"),
375 )
376 vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
377 doc=f"Mean-value map of {name}",
378 name=f"{{coaddName}}Coadd_{name}_map_mean",
379 storageClass="HealSparseMap",
380 dimensions=("tract", "skymap", "band"),
381 )
382 vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
383 doc=f"Weighted mean-value map of {name}",
384 name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
385 storageClass="HealSparseMap",
386 dimensions=("tract", "skymap", "band"),
387 )
388 vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
389 doc=f"Sum-value map of {name}",
390 name=f"{{coaddName}}Coadd_{name}_map_sum",
391 storageClass="HealSparseMap",
392 dimensions=("tract", "skymap", "band"),
393 )
395 def __init__(self, *, config=None):
396 super().__init__(config=config)
398 # Not all possible maps in the registry will be configured to run.
399 # Here we remove the unused connections.
400 for name in BasePropertyMap.registry:
401 if name not in config.property_maps:
402 prop_config = BasePropertyMapConfig()
403 prop_config.do_min = False
404 prop_config.do_max = False
405 prop_config.do_mean = False
406 prop_config.do_weighted_mean = False
407 prop_config.do_sum = False
408 else:
409 prop_config = config.property_maps[name]
411 if not prop_config.do_min:
412 self.outputs.remove(f"{name}_map_min")
413 if not prop_config.do_max:
414 self.outputs.remove(f"{name}_map_max")
415 if not prop_config.do_mean:
416 self.outputs.remove(f"{name}_map_mean")
417 if not prop_config.do_weighted_mean:
418 self.outputs.remove(f"{name}_map_weighted_mean")
419 if not prop_config.do_sum:
420 self.outputs.remove(f"{name}_map_sum")
423class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
424 pipelineConnections=HealSparsePropertyMapConnections):
425 """Configuration parameters for HealSparsePropertyMapTask"""
426 property_maps = BasePropertyMap.registry.makeField(
427 multi=True,
428 default=["exposure_time",
429 "psf_size",
430 "psf_e1",
431 "psf_e2",
432 "psf_maglim",
433 "sky_noise",
434 "sky_background",
435 "dcr_dra",
436 "dcr_ddec",
437 "dcr_e1",
438 "dcr_e2",
439 "epoch"],
440 doc="Property map computation objects",
441 )
443 def setDefaults(self):
444 self.property_maps["exposure_time"].do_sum = True
445 self.property_maps["psf_size"].do_weighted_mean = True
446 self.property_maps["psf_e1"].do_weighted_mean = True
447 self.property_maps["psf_e2"].do_weighted_mean = True
448 self.property_maps["psf_maglim"].do_weighted_mean = True
449 self.property_maps["sky_noise"].do_weighted_mean = True
450 self.property_maps["sky_background"].do_weighted_mean = True
451 self.property_maps["dcr_dra"].do_weighted_mean = True
452 self.property_maps["dcr_ddec"].do_weighted_mean = True
453 self.property_maps["dcr_e1"].do_weighted_mean = True
454 self.property_maps["dcr_e2"].do_weighted_mean = True
455 self.property_maps["epoch"].do_mean = True
456 self.property_maps["epoch"].do_min = True
457 self.property_maps["epoch"].do_max = True
460class HealSparsePropertyMapTask(pipeBase.PipelineTask):
461 """Task to compute Healsparse property maps.
463 This task will compute individual property maps (per tract, per
464 map type, per band). These maps cover the full coadd tract, and
465 are not truncated to the inner tract region.
466 """
467 ConfigClass = HealSparsePropertyMapConfig
468 _DefaultName = "healSparsePropertyMapTask"
470 def __init__(self, **kwargs):
471 super().__init__(**kwargs)
472 self.property_maps = PropertyMapMap()
473 for name, config, PropertyMapClass in self.config.property_maps.apply():
474 self.property_maps[name] = PropertyMapClass(config, name)
476 @timeMethod
477 def runQuantum(self, butlerQC, inputRefs, outputRefs):
478 inputs = butlerQC.get(inputRefs)
480 sky_map = inputs.pop("sky_map")
482 tract = butlerQC.quantum.dataId["tract"]
483 band = butlerQC.quantum.dataId["band"]
485 input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
486 coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}
488 visit_summary_dict = {ref.dataId["visit"]: ref.get()
489 for ref in inputs["visit_summaries"]}
491 self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)
493 # Write the outputs
494 for name, property_map in self.property_maps.items():
495 if property_map.config.do_min:
496 butlerQC.put(property_map.min_map,
497 getattr(outputRefs, f"{name}_map_min"))
498 if property_map.config.do_max:
499 butlerQC.put(property_map.max_map,
500 getattr(outputRefs, f"{name}_map_max"))
501 if property_map.config.do_mean:
502 butlerQC.put(property_map.mean_map,
503 getattr(outputRefs, f"{name}_map_mean"))
504 if property_map.config.do_weighted_mean:
505 butlerQC.put(property_map.weighted_mean_map,
506 getattr(outputRefs, f"{name}_map_weighted_mean"))
507 if property_map.config.do_sum:
508 butlerQC.put(property_map.sum_map,
509 getattr(outputRefs, f"{name}_map_sum"))
511 def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
512 """Run the healsparse property task.
514 Parameters
515 ----------
516 sky_map : `lsst.skymap.BaseSkyMap`
517 tract : `int`
518 Tract number.
519 band : `str`
520 Band name for logging.
521 coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
522 Dictionary of coadd exposure references. Keys are patch numbers.
523 input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
524 Dictionary of input map references. Keys are patch numbers.
525 visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
526 Dictionary of visit summary tables. Keys are visit numbers.
528 Raises
529 ------
530 RepeatableQuantumError
531 If visit_summary_dict is missing any visits or detectors found in an
532 input map. This leads to an inconsistency between what is in the coadd
533 (via the input map) and the visit summary tables which contain data
534 to compute the maps.
535 """
536 tract_info = sky_map[tract]
538 tract_maps_initialized = False
540 for patch in input_map_dict.keys():
541 self.log.info("Making maps for band %s, tract %d, patch %d.",
542 band, tract, patch)
544 patch_info = tract_info[patch]
546 input_map = input_map_dict[patch].get()
547 if input_map.valid_pixels.size == 0:
548 self.log.warning("No valid pixels for band %s, tract %d, patch %d; skipping.",
549 band, tract, patch)
550 coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
551 coadd_inputs = coadd_dict[patch].get(component="coaddInputs")
553 coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())
555 # Crop input_map to the inner polygon of the patch
556 poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
557 patch_radec = self._vertices_to_radec(poly_vertices)
558 patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
559 value=np.arange(input_map.wide_mask_maxbits))
560 with warnings.catch_warnings():
561 # Healsparse will emit a warning if nside coverage is greater than
562 # 128. In the case of generating patch input maps, and not global
563 # maps, high nside coverage works fine, so we can suppress this
564 # warning.
565 warnings.simplefilter("ignore")
566 patch_poly_map = patch_poly.get_map_like(input_map)
567 input_map = hsp.and_intersection([input_map, patch_poly_map])
569 if not tract_maps_initialized:
570 # We use the first input map nside information to initialize
571 # the tract maps
572 nside_coverage = self._compute_nside_coverage_tract(tract_info)
573 nside = input_map.nside_sparse
575 do_compute_approx_psf = False
576 # Initialize the tract maps
577 for property_map in self.property_maps:
578 property_map.initialize_tract_maps(nside_coverage, nside)
579 if property_map.requires_psf:
580 do_compute_approx_psf = True
582 tract_maps_initialized = True
584 valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)
586 # Check if there are no valid pixels for the inner (unique) patch region
587 if valid_pixels.size == 0:
588 continue
590 # Initialize the value accumulators
591 for property_map in self.property_maps:
592 property_map.initialize_values(valid_pixels.size)
593 property_map.zeropoint = coadd_zeropoint
595 # Initialize the weight and counter accumulators
596 total_weights = np.zeros(valid_pixels.size)
597 total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)
599 for bit, ccd_row in enumerate(coadd_inputs.ccds):
600 # Which pixels in the map are used by this visit/detector
601 inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))
603 # Check if there are any valid pixels in the map from this detector.
604 if inmap.size == 0:
605 continue
607 # visit, detector_id, weight = input_dict[bit]
608 visit = ccd_row["visit"]
609 detector_id = ccd_row["ccd"]
610 weight = ccd_row["weight"]
612 x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
613 scalings = self._compute_calib_scale(ccd_row, x, y)
615 if do_compute_approx_psf:
616 psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap])
617 else:
618 psf_array = None
620 total_weights[inmap] += weight
621 total_inputs[inmap] += 1
623 # Retrieve the correct visitSummary row
624 if visit not in visit_summary_dict:
625 msg = f"Visit {visit} not found in visit_summaries."
626 raise pipeBase.RepeatableQuantumError(msg)
627 row = visit_summary_dict[visit].find(detector_id)
628 if row is None:
629 msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
630 raise pipeBase.RepeatableQuantumError(msg)
632 # Accumulate the values
633 for property_map in self.property_maps:
634 property_map.accumulate_values(inmap,
635 vpix_ra[inmap],
636 vpix_dec[inmap],
637 weight,
638 scalings,
639 row,
640 psf_array=psf_array)
642 # Finalize the mean values and set the tract maps
643 for property_map in self.property_maps:
644 property_map.finalize_mean_values(total_weights, total_inputs)
645 property_map.set_map_values(valid_pixels)
647 def _compute_calib_scale(self, ccd_row, x, y):
648 """Compute calibration scaling values.
650 Parameters
651 ----------
652 ccd_row : `lsst.afw.table.ExposureRecord`
653 Exposure metadata for a given detector exposure.
654 x : `np.ndarray`
655 Array of x positions.
656 y : `np.ndarray`
657 Array of y positions.
659 Returns
660 -------
661 calib_scale : `np.ndarray`
662 Array of calibration scale values.
663 """
664 photo_calib = ccd_row.getPhotoCalib()
665 bf = photo_calib.computeScaledCalibration()
666 if bf.getBBox() == ccd_row.getBBox():
667 # Track variable calibration over the detector
668 calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
669 else:
670 # Spatially constant calibration
671 calib_scale = photo_calib.getCalibrationMean()
673 return calib_scale
675 def _vertices_to_radec(self, vertices):
676 """Convert polygon vertices to ra/dec.
678 Parameters
679 ----------
680 vertices : `list` [ `lsst.sphgeom.UnitVector3d` ]
681 Vertices for bounding polygon.
683 Returns
684 -------
685 radec : `numpy.ndarray`
686 Nx2 array of ra/dec positions (in degrees) associated with vertices.
687 """
688 lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
689 radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
690 x in lonlats])
691 return radec
693 def _compute_nside_coverage_tract(self, tract_info):
694 """Compute the optimal coverage nside for a tract.
696 Parameters
697 ----------
698 tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo`
699 Tract information object.
701 Returns
702 -------
703 nside_coverage : `int`
704 Optimal coverage nside for a tract map.
705 """
706 num_patches = tract_info.getNumPatches()
708 # Compute approximate patch area
709 patch_info = tract_info.getPatchInfo(0)
710 vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
711 radec = self._vertices_to_radec(vertices)
712 delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
713 delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
714 patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))
716 tract_area = num_patches[0]*num_patches[1]*patch_area
717 # Start with a fairly low nside and increase until we find the approximate area.
718 nside_coverage_tract = 32
719 while hpg.nside_to_pixel_area(nside_coverage_tract, degrees=True) > tract_area:
720 nside_coverage_tract = 2*nside_coverage_tract
721 # Step back one level, but clamp to the range 32-128: no coarser
722 # than nside=32 and no finer than nside=128 (recommended by healsparse).
723 nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, 128))
725 return nside_coverage_tract
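# Worked example (assumed tract size, for illustration only): for a tract
# of roughly 2.6 deg^2 the loop tests nside=32 (~3.4 deg^2 per pixel, still
# larger than the tract) and exits at nside=64 (~0.84 deg^2); stepping back
# one level and clipping then returns nside_coverage_tract = 32.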
728class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
729 dimensions=("band", "skymap",),
730 defaultTemplates={"coaddName": "deep"}):
731 sky_map = pipeBase.connectionTypes.Input(
732 doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
733 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
734 storageClass="SkyMap",
735 dimensions=("skymap",),
736 )
738 # Create output connections for all possible maps defined in the
739 # registry. The vars() trick used here allows us to set class attributes
740 # programmatically. Taken from
741 # https://stackoverflow.com/questions/2519807/
742 # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
743 for name in BasePropertyMap.registry:
744 vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input(
745 doc=f"Minimum-value map of {name}",
746 name=f"{{coaddName}}Coadd_{name}_map_min",
747 storageClass="HealSparseMap",
748 dimensions=("tract", "skymap", "band"),
749 multiple=True,
750 deferLoad=True,
751 )
752 vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output(
753 doc=f"Minimum-value map of {name}",
754 name=f"{{coaddName}}Coadd_{name}_consolidated_map_min",
755 storageClass="HealSparseMap",
756 dimensions=("skymap", "band"),
757 )
758 vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input(
759 doc=f"Maximum-value map of {name}",
760 name=f"{{coaddName}}Coadd_{name}_map_max",
761 storageClass="HealSparseMap",
762 dimensions=("tract", "skymap", "band"),
763 multiple=True,
764 deferLoad=True,
765 )
766 vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output(
767 doc=f"Maximum-value map of {name}",
768 name=f"{{coaddName}}Coadd_{name}_consolidated_map_max",
769 storageClass="HealSparseMap",
770 dimensions=("skymap", "band"),
771 )
772 vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input(
773 doc=f"Mean-value map of {name}",
774 name=f"{{coaddName}}Coadd_{name}_map_mean",
775 storageClass="HealSparseMap",
776 dimensions=("tract", "skymap", "band"),
777 multiple=True,
778 deferLoad=True,
779 )
780 vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output(
781 doc=f"Mean-value map of {name}",
782 name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean",
783 storageClass="HealSparseMap",
784 dimensions=("skymap", "band"),
785 )
786 vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input(
787 doc=f"Weighted mean-value map of {name}",
788 name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
789 storageClass="HealSparseMap",
790 dimensions=("tract", "skymap", "band"),
791 multiple=True,
792 deferLoad=True,
793 )
794 vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output(
795 doc=f"Weighted mean-value map of {name}",
796 name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean",
797 storageClass="HealSparseMap",
798 dimensions=("skymap", "band"),
799 )
800 vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input(
801 doc=f"Sum-value map of {name}",
802 name=f"{{coaddName}}Coadd_{name}_map_sum",
803 storageClass="HealSparseMap",
804 dimensions=("tract", "skymap", "band"),
805 multiple=True,
806 deferLoad=True,
807 )
808 vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output(
809 doc=f"Sum-value map of {name}",
810 name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum",
811 storageClass="HealSparseMap",
812 dimensions=("skymap", "band"),
813 )
815 def __init__(self, *, config=None):
816 super().__init__(config=config)
818 # Not all possible maps in the registry will be configured to run.
819 # Here we remove the unused connections.
820 for name in BasePropertyMap.registry:
821 if name not in config.property_maps:
822 prop_config = BasePropertyMapConfig()
823 prop_config.do_min = False
824 prop_config.do_max = False
825 prop_config.do_mean = False
826 prop_config.do_weighted_mean = False
827 prop_config.do_sum = False
828 else:
829 prop_config = config.property_maps[name]
831 if not prop_config.do_min:
832 self.inputs.remove(f"{name}_map_min")
833 self.outputs.remove(f"{name}_consolidated_map_min")
834 if not prop_config.do_max:
835 self.inputs.remove(f"{name}_map_max")
836 self.outputs.remove(f"{name}_consolidated_map_max")
837 if not prop_config.do_mean:
838 self.inputs.remove(f"{name}_map_mean")
839 self.outputs.remove(f"{name}_consolidated_map_mean")
840 if not prop_config.do_weighted_mean:
841 self.inputs.remove(f"{name}_map_weighted_mean")
842 self.outputs.remove(f"{name}_consolidated_map_weighted_mean")
843 if not prop_config.do_sum:
844 self.inputs.remove(f"{name}_map_sum")
845 self.outputs.remove(f"{name}_consolidated_map_sum")
848class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
849 pipelineConnections=ConsolidateHealSparsePropertyMapConnections):
850 """Configuration parameters for ConsolidateHealSparsePropertyMapTask"""
851 property_maps = BasePropertyMap.registry.makeField(
852 multi=True,
853 default=["exposure_time",
854 "psf_size",
855 "psf_e1",
856 "psf_e2",
857 "psf_maglim",
858 "sky_noise",
859 "sky_background",
860 "dcr_dra",
861 "dcr_ddec",
862 "dcr_e1",
863 "dcr_e2",
864 "epoch"],
865 doc="Property map computation objects",
866 )
867 nside_coverage = pexConfig.Field(
868 doc="Consolidated HealSparse coverage map nside. Must be power of 2.",
869 dtype=int,
870 default=32,
871 check=_is_power_of_two,
872 )
874 def setDefaults(self):
875 self.property_maps["exposure_time"].do_sum = True
876 self.property_maps["psf_size"].do_weighted_mean = True
877 self.property_maps["psf_e1"].do_weighted_mean = True
878 self.property_maps["psf_e2"].do_weighted_mean = True
879 self.property_maps["psf_maglim"].do_weighted_mean = True
880 self.property_maps["sky_noise"].do_weighted_mean = True
881 self.property_maps["sky_background"].do_weighted_mean = True
882 self.property_maps["dcr_dra"].do_weighted_mean = True
883 self.property_maps["dcr_ddec"].do_weighted_mean = True
884 self.property_maps["dcr_e1"].do_weighted_mean = True
885 self.property_maps["dcr_e2"].do_weighted_mean = True
886 self.property_maps["epoch"].do_mean = True
887 self.property_maps["epoch"].do_min = True
888 self.property_maps["epoch"].do_max = True
891class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask):
892 """Task to consolidate HealSparse property maps.
894 This task will take all the individual tract-based maps (per map type,
895 per band) and consolidate them into one survey-wide map (per map type,
896 per band). Each tract map is truncated to its inner region before
897 consolidation.
898 """
899 ConfigClass = ConsolidateHealSparsePropertyMapConfig
900 _DefaultName = "consolidateHealSparsePropertyMapTask"
902 def __init__(self, **kwargs):
903 super().__init__(**kwargs)
904 self.property_maps = PropertyMapMap()
905 for name, config, PropertyMapClass in self.config.property_maps.apply():
906 self.property_maps[name] = PropertyMapClass(config, name)
908 @timeMethod
909 def runQuantum(self, butlerQC, inputRefs, outputRefs):
910 inputs = butlerQC.get(inputRefs)
912 sky_map = inputs.pop("sky_map")
914 # These need to be consolidated one at a time to conserve memory.
915 for name in self.config.property_maps.names:
916 for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']:
917 map_type = f"{name}_map_{type_}"
918 if map_type in inputs:
919 input_refs = {ref.dataId['tract']: ref
920 for ref in inputs[map_type]}
921 consolidated_map = self.consolidate_map(sky_map, input_refs)
922 butlerQC.put(consolidated_map,
923 getattr(outputRefs, f"{name}_consolidated_map_{type_}"))
925 def consolidate_map(self, sky_map, input_refs):
926 """Consolidate the healsparse property maps.
928 Parameters
929 ----------
930 sky_map : `lsst.skymap.BaseSkyMap`
931 input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
932 Dictionary of tract_id mapping to dataref.
934 Returns
935 -------
936 consolidated_map : `healsparse.HealSparseMap`
937 Consolidated HealSparse map.
938 """
939 # First, we read in the coverage maps to know how much memory
940 # to allocate
941 cov_mask = None
942 nside_coverage_inputs = None
943 for tract_id in input_refs:
944 cov = input_refs[tract_id].get(component='coverage')
945 if cov_mask is None:
946 cov_mask = cov.coverage_mask
947 nside_coverage_inputs = cov.nside_coverage
948 else:
949 cov_mask |= cov.coverage_mask
951 cov_pix_inputs, = np.where(cov_mask)
953 # Compute the coverage pixels for the desired nside_coverage
954 if nside_coverage_inputs == self.config.nside_coverage:
955 cov_pix = cov_pix_inputs
956 elif nside_coverage_inputs > self.config.nside_coverage:
957 # Converting from higher resolution coverage to lower
958 # resolution coverage.
959 bit_shift = hsp.utils._compute_bitshift(self.config.nside_coverage,
960 nside_coverage_inputs)
961 cov_pix = np.right_shift(cov_pix_inputs, bit_shift)
962 else:
963 # Converting from lower resolution coverage to higher
964 # resolution coverage.
965 bit_shift = hsp.utils._compute_bitshift(nside_coverage_inputs,
966 self.config.nside_coverage)
967 cov_pix = np.left_shift(cov_pix_inputs, bit_shift)
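# Worked example (assuming healsparse's private _compute_bitshift returns
# 2 * log2(nside_high / nside_low), as its use here implies): with
# nside_coverage_inputs = 128 and a configured nside_coverage of 32,
# bit_shift = 4, so the right shift maps each input coverage pixel to
# its nested parent (16 -> 1); in the opposite direction the left shift
# maps a parent pixel to its first nested child at the finer resolution.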
969 # Now read in each tract map and build the consolidated map.
970 consolidated_map = None
971 for tract_id in input_refs:
972 input_map = input_refs[tract_id].get()
973 if consolidated_map is None:
974 consolidated_map = hsp.HealSparseMap.make_empty(
975 self.config.nside_coverage,
976 input_map.nside_sparse,
977 input_map.dtype,
978 sentinel=input_map._sentinel,
979 cov_pixels=cov_pix)
981 # Only use pixels that are properly inside the tract.
982 vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
983 vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True)
985 in_tract = (vpix_tract_ids == tract_id)
987 consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]]
989 return consolidated_map
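# A hedged usage sketch (repo, collection, and data ID values below are
# assumptions): once this task has run, a consolidated map can be fetched
# and inspected directly, e.g.
#
#     from lsst.daf.butler import Butler
#     butler = Butler("REPO", collections="COLLECTION")
#     m = butler.get("deepCoadd_psf_size_consolidated_map_weighted_mean",
#                    skymap="SKYMAP", band="i")
#     print(m.nside_sparse, m.valid_pixels.size)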