__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig",
           "HealSparseMapFormatter", "HealSparsePropertyMapConnections",
           "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask",
           "ConsolidateHealSparsePropertyMapConnections",
           "ConsolidateHealSparsePropertyMapConfig",
           "ConsolidateHealSparsePropertyMapTask"]
import numbers
import warnings
from collections import defaultdict

import numpy as np
import hpgeom as hpg
import healsparse as hsp

import lsst.geom
import lsst.sphgeom
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.afw.geom as afwGeom
from lsst.daf.butler import Formatter
from lsst.skymap import BaseSkyMap
from lsst.utils.timer import timeMethod
from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig,
                                          PropertyMapMap, compute_approx_psf_size_and_shape)
48 """Interface for reading and writing healsparse.HealSparseMap files."""
49 unsupportedParameters = frozenset()
50 supportedExtensions = frozenset({
".hsp",
".fit",
".fits"})
53 def read(self, component=None):
55 path = self.fileDescriptor.location.path
57 if component ==
'coverage':
59 data = hsp.HealSparseCoverage.read(path)
60 except (OSError, RuntimeError):
61 raise ValueError(f
"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")
65 if self.fileDescriptor.parameters
is None:
69 pixels = self.fileDescriptor.parameters.get(
'pixels',
None)
70 degrade_nside = self.fileDescriptor.parameters.get(
'degrade_nside',
None)
72 data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
73 except (OSError, RuntimeError):
74 raise ValueError(f
"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")
78 def write(self, inMemoryDataset):
81 self.fileDescriptor.location.updateExtension(self.
extension)
82 inMemoryDataset.write(self.fileDescriptor.location.path, clobber=
True)
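# Illustrative usage sketch (not part of this module): because the formatter
# honors the 'pixels' and 'degrade_nside' read parameters, a butler client can
# request a partial or degraded read.  The dataset type, data ID, and
# collection names below are hypothetical placeholders.
#
#   from lsst.daf.butler import Butler
#   butler = Butler("REPO", collections=["some/collection"])
#   hsp_map = butler.get("deepCoadd_psf_size_map_weighted_mean",
#                        tract=1234, band="r", skymap="some_skymap",
#                        parameters={"degrade_nside": 4096})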
def _is_power_of_two(value):
    """Check that value is a power of two.

    Parameters
    ----------
    value : `int`
        Value to check.

    Returns
    -------
    is_power_of_two : `bool`
        True if value is a power of two; False otherwise, or
        if value is not an integer.
    """
    if not isinstance(value, numbers.Integral):
        return False

    # A power of two has exactly one bit set, so value & (value - 1) is zero;
    # the extra check excludes zero itself.
    return (value & (value - 1) == 0) and value != 0
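# Illustrative checks (these follow directly from the test above):
#   _is_power_of_two(64)  -> True   (64 & 63 == 0)
#   _is_power_of_two(48)  -> False  (48 & 47 == 32)
#   _is_power_of_two(2.0) -> False  (non-integers are rejected up front)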
110 """Configuration parameters for HealSparseInputMapTask"""
111 nside = pexConfig.Field(
112 doc=
"Mapping healpix nside. Must be power of 2.",
115 check=_is_power_of_two,
117 nside_coverage = pexConfig.Field(
118 doc=
"HealSparse coverage map nside. Must be power of 2.",
121 check=_is_power_of_two,
123 bad_mask_min_coverage = pexConfig.Field(
124 doc=(
"Minimum area fraction of a map healpixel pixel that must be "
125 "covered by bad pixels to be removed from the input map. "
126 "This is approximate."),
133 """Task for making a HealSparse input map."""
135 ConfigClass = HealSparseInputMapConfig
136 _DefaultName =
"healSparseInputMap"
139 pipeBase.Task.__init__(self, **kwargs)
144 """Build a map from ccd valid polygons or bounding boxes.
149 Bounding box for region to build input map.
151 WCS object
for region to build input map.
153 Exposure catalog
with ccd data
from coadd inputs.
155 with warnings.catch_warnings():
160 warnings.simplefilter(
"ignore")
161 self.
ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
162 nside_sparse=self.config.nside,
164 wide_mask_maxbits=len(ccds))
169 pixel_scale = wcs.getPixelScale().asArcseconds()
170 hpix_area_arcsec2 = hpg.nside_to_pixel_area(self.config.nside, degrees=
True)*(3600.**2.)
171 self.
_min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)
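        # Worked example of the threshold above (numbers are illustrative, not
        # enforced defaults): for nside = 32768 a healpix pixel covers about
        # 41.5 arcsec**2; with a 0.2 arcsec/pixel image scale that is roughly
        # 1040 image pixels, so bad_mask_min_coverage = 0.5 would set
        # self._min_bad to about 520 bad image pixels per healpix pixel.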
        metadata = {}
        self._bits_per_visit = defaultdict(list)
        for bit, ccd_row in enumerate(ccds):
            metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
            metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
            metadata[f"B{bit:04d}WT"] = ccd_row["weight"]

            self._bits_per_visit[ccd_row["visit"]].append(bit)

            ccd_poly = ccd_row.getValidPolygon()
            if ccd_poly is None:
                ccd_poly = afwGeom.Polygon(lsst.geom.Box2D(ccd_row.getBBox()))
            # Detectors need to be rendered with their own wcs.
            ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())

            # Create a ccd healsparse polygon and set the corresponding bit.
            poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
                               dec=ccd_poly_radec[: -1, 1],
                               value=[bit])
            self.ccd_input_map.set_bits_pix(poly.get_pixels(nside=self.config.nside), [bit])

        # Cut down to the overall bounding box with associated wcs.
        bbox_afw_poly = afwGeom.Polygon(lsst.geom.Box2D(bbox))
        bbox_poly_radec = self._pixels_to_radec(self._wcs,
                                                bbox_afw_poly.convexHull().getVertices())
        bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
                                value=np.arange(self.ccd_input_map.wide_mask_maxbits))
        bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)
        self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map])
        self.ccd_input_map.metadata = metadata

        # Save the valid pixels of the full input map.
        self._ccd_input_pixels = self.ccd_input_map.valid_pixels

        # Create a temporary map, with one integer field per input visit, to
        # accumulate the count of bad pixels in each healpix pixel.
        dtype = [(f"v{visit}", np.int64) for visit in self._bits_per_visit.keys()]

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty(
                nside_coverage=self.config.nside_coverage,
                nside_sparse=self.config.nside,
                dtype=dtype,
                primary=dtype[0][0])
229 """Mask a subregion from a visit.
230 This must be run after build_ccd_input_map initializes
236 Bounding box from region to mask.
238 Visit number corresponding to warp
with mask.
239 mask : `lsst.afw.image.MaskX`
240 Mask plane
from warp exposure.
241 bit_mask_value : `int`
242 Bit mask to check
for bad pixels.
246 RuntimeError : Raised
if build_ccd_input_map was
not run first.
249 raise RuntimeError(
"Must run build_ccd_input_map before mask_warp_bbox")
252 bad_pixels = np.where(mask.array & bit_mask_value)
253 if len(bad_pixels[0]) == 0:
258 bad_ra, bad_dec = self.
_wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
259 bad_pixels[0].astype(np.float64),
261 bad_hpix = hpg.angle_to_pixel(self.config.nside, bad_ra, bad_dec)
264 min_bad_hpix = bad_hpix.min()
265 bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
266 np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)
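        # Small illustration of the counting above (values are made up):
        # bad_hpix = [10, 12, 10] gives min_bad_hpix = 10, a count array of
        # length 3, and np.add.at produces bad_hpix_count = [2, 0, 1].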
        # Add the bad-pixel counts to the accumulator map, clipping the
        # sentinel values of previously untouched pixels up to zero first.
        pix_to_add, = np.where(bad_hpix_count > 0)
        hpix_to_add = min_bad_hpix + pix_to_add
        count_map_arr = self._ccd_input_bad_count_map[hpix_to_add]
        primary = self._ccd_input_bad_count_map.primary
        count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)

        count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
        count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]

        self._ccd_input_bad_count_map[hpix_to_add] = count_map_arr
282 """Use accumulated mask information to finalize the masking of
287 RuntimeError : Raised if build_ccd_input_map was
not run first.
290 raise RuntimeError(
"Must run build_ccd_input_map before finalize_ccd_input_map_mask.")
294 to_mask, = np.where(count_map_arr[f
"v{visit}"] > self.
_min_bad)
295 if to_mask.size == 0:
    def _pixels_to_radec(self, wcs, pixels):
        """Convert pixels to ra/dec positions using a wcs.

        Parameters
        ----------
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object.
        pixels : `list` [`lsst.geom.Point2D`]
            List of pixels to convert.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions associated with pixels.
        """
        sph_pts = wcs.pixelToSky(pixels)
        return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
                         for sph in sph_pts])
class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                       dimensions=("tract", "band", "skymap",),
                                       defaultTemplates={"coaddName": "deep",
                                                         "calexpType": ""}):
    input_maps = pipeBase.connectionTypes.Input(
        doc="Healsparse bit-wise coadd input maps",
        name="{coaddName}Coadd_inputMap",
        storageClass="HealSparseMap",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    coadd_exposures = pipeBase.connectionTypes.Input(
        doc="Coadded exposures associated with input_maps",
        name="{coaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    visit_summaries = pipeBase.connectionTypes.Input(
        doc="Visit summary tables with aggregated statistics",
        name="{calexpType}visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Dynamically add output connections for all possible maps in the
    # property-map registry.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Remove the output connections for those that are not configured.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.outputs.remove(f"{name}_map_min")
            if not prop_config.do_max:
                self.outputs.remove(f"{name}_map_max")
            if not prop_config.do_mean:
                self.outputs.remove(f"{name}_map_mean")
            if not prop_config.do_weighted_mean:
                self.outputs.remove(f"{name}_map_weighted_mean")
            if not prop_config.do_sum:
                self.outputs.remove(f"{name}_map_sum")
class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                  pipelineConnections=HealSparsePropertyMapConnections):
    """Configuration parameters for HealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
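# A hypothetical per-pipeline override, shown only as an example of how the
# registry field is used: enabling additional statistics for one map makes the
# corresponding output connections reappear, since the connections class only
# removes outputs whose do_* flags are False.
#
#   config = HealSparsePropertyMapConfig()
#   config.property_maps["psf_size"].do_min = True
#   config.property_maps["psf_size"].do_max = True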
class HealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to compute Healsparse property maps.

    This task will compute individual property maps (per tract, per
    map type, per band).  These maps cover the full coadd tract, and
    are not truncated to the inner tract region.
    """
    ConfigClass = HealSparsePropertyMapConfig
    _DefaultName = "healSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)
    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        tract = butlerQC.quantum.dataId["tract"]
        band = butlerQC.quantum.dataId["band"]

        input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
        coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}

        visit_summary_dict = {ref.dataId["visit"]: ref.get()
                              for ref in inputs["visit_summaries"]}

        self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)

        # Write the outputs for each configured statistic.
        for name, property_map in self.property_maps.items():
            if property_map.config.do_min:
                butlerQC.put(property_map.min_map,
                             getattr(outputRefs, f"{name}_map_min"))
            if property_map.config.do_max:
                butlerQC.put(property_map.max_map,
                             getattr(outputRefs, f"{name}_map_max"))
            if property_map.config.do_mean:
                butlerQC.put(property_map.mean_map,
                             getattr(outputRefs, f"{name}_map_mean"))
            if property_map.config.do_weighted_mean:
                butlerQC.put(property_map.weighted_mean_map,
                             getattr(outputRefs, f"{name}_map_weighted_mean"))
            if property_map.config.do_sum:
                butlerQC.put(property_map.sum_map,
                             getattr(outputRefs, f"{name}_map_sum"))
    def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
        """Run the healsparse property task.

        Parameters
        ----------
        sky_map : Sky map object
        tract : `int`
            Tract number.
        band : `str`
            Band name for logging.
        coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of coadd exposure references.  Keys are patch numbers.
        input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of input map references.  Keys are patch numbers.
        visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
            Dictionary of visit summary tables.  Keys are visit numbers.

        Raises
        ------
        RepeatableQuantumError
            If visit_summary_dict is missing any visits or detectors found in an
            input map.  This leads to an inconsistency between what is in the coadd
            (via the input map) and the visit summary tables which contain data
            to compute the maps.
        """
        tract_info = sky_map[tract]

        tract_maps_initialized = False

        for patch in input_map_dict.keys():
            self.log.info("Making maps for band %s, tract %d, patch %d.",
                          band, tract, patch)

            patch_info = tract_info[patch]

            input_map = input_map_dict[patch].get()
            coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
            coadd_inputs = coadd_dict[patch].get(component="coaddInputs")

            coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())
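            # For scale only: if the coadd is photometrically calibrated in nJy
            # (1 count = 1 nJy), getInstFluxAtZeroMagnitude() returns roughly
            # 3.631e12, giving coadd_zeropoint = 2.5*log10(3.631e12) ~ 31.4 mag.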
            # Crop input_map to the inner polygon of the patch.
            poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
            patch_radec = self._vertices_to_radec(poly_vertices)
            patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
                                     value=np.arange(input_map.wide_mask_maxbits))
            patch_poly_map = patch_poly.get_map_like(input_map)
            input_map = hsp.and_intersection([input_map, patch_poly_map])

            if not tract_maps_initialized:
                # Use the first input map to initialize the tract maps.
                nside_coverage = self._compute_nside_coverage_tract(tract_info)
                nside = input_map.nside_sparse

                do_compute_approx_psf = False
                for property_map in self.property_maps:
                    property_map.initialize_tract_maps(nside_coverage, nside)
                    if property_map.requires_psf:
                        do_compute_approx_psf = True

                tract_maps_initialized = True

            valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)

            # Skip the patch if there are no valid pixels in its inner region.
            if valid_pixels.size == 0:
                continue

            # Initialize the value accumulators.
            for property_map in self.property_maps:
                property_map.initialize_values(valid_pixels.size)
                property_map.zeropoint = coadd_zeropoint

            # Initialize the weight and counter accumulators.
            total_weights = np.zeros(valid_pixels.size)
            total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)

            for bit, ccd_row in enumerate(coadd_inputs.ccds):
                # Which pixels in the map are used by this visit/detector?
                inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))
                if inmap.size == 0:
                    # No overlapping pixels from this detector.
                    continue

                visit = ccd_row["visit"]
                detector_id = ccd_row["ccd"]
                weight = ccd_row["weight"]

                x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
                scalings = self._compute_calib_scale(ccd_row, x, y)

                if do_compute_approx_psf:
                    psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap],
                                                                  vpix_dec[inmap])
                else:
                    psf_array = None

                total_weights[inmap] += weight
                total_inputs[inmap] += 1

                # Retrieve the correct visit summary row for this detector.
                if visit not in visit_summary_dict:
                    msg = f"Visit {visit} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)
                row = visit_summary_dict[visit].find(detector_id)
                if row is None:
                    msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)

                # Accumulate the values for this detector.
                for property_map in self.property_maps:
                    property_map.accumulate_values(inmap,
                                                   vpix_ra[inmap],
                                                   vpix_dec[inmap],
                                                   weight,
                                                   scalings,
                                                   row,
                                                   psf_array=psf_array)

            # Finalize the mean values and set the tract maps.
            for property_map in self.property_maps:
                property_map.finalize_mean_values(total_weights, total_inputs)
                property_map.set_map_values(valid_pixels)
    def _compute_calib_scale(self, ccd_row, x, y):
        """Compute calibration scaling values.

        Parameters
        ----------
        ccd_row : `lsst.afw.table.ExposureRecord`
            Exposure metadata for a given detector exposure.
        x : `np.ndarray`
            Array of x positions.
        y : `np.ndarray`
            Array of y positions.

        Returns
        -------
        calib_scale : `np.ndarray`
            Array of calibration scale values.
        """
        photo_calib = ccd_row.getPhotoCalib()
        bf = photo_calib.computeScaledCalibration()
        if bf.getBBox() == ccd_row.getBBox():
            # Track the spatially variable calibration over the detector.
            calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
        else:
            # Spatially constant calibration.
            calib_scale = photo_calib.getCalibrationMean()

        return calib_scale
    def _vertices_to_radec(self, vertices):
        """Convert polygon vertices to ra/dec.

        Parameters
        ----------
        vertices : `list` [`lsst.sphgeom.UnitVector3d`]
            Vertices for bounding polygon.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions (in degrees) associated with vertices.
        """
        lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
        radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
                          x in lonlats])
        return radec
    def _compute_nside_coverage_tract(self, tract_info):
        """Compute the optimal coverage nside for a tract.

        Parameters
        ----------
        tract_info : `lsst.skymap.TractInfo`
            Tract information object.

        Returns
        -------
        nside_coverage : `int`
            Optimal coverage nside for a tract map.
        """
        num_patches = tract_info.getNumPatches()

        # Estimate the tract area from the area of a single patch.
        patch_info = tract_info.getPatchInfo(0)
        vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
        radec = self._vertices_to_radec(vertices)
        delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
        delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
        patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))

        tract_area = num_patches[0]*num_patches[1]*patch_area
        # Start with a low nside and double it until the coverage pixel area
        # is smaller than the tract area.
        nside_coverage_tract = 32
        while hpg.nside_to_pixel_area(nside_coverage_tract, degrees=True) > tract_area:
            nside_coverage_tract = 2*nside_coverage_tract
        # Step back by a factor of two, keeping the coverage nside between
        # 32 and 128.
        nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, 128))

        return nside_coverage_tract
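    # Worked example of the loop above, for a hypothetical tract of ~0.5 deg**2:
    # coverage pixel areas are ~3.36 deg**2 (nside 32), ~0.84 deg**2 (nside 64),
    # and ~0.21 deg**2 (nside 128), so the doubling stops at nside 128 and the
    # final clip of 128/2 returns nside_coverage = 64.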
class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                                  dimensions=("band", "skymap",),
                                                  defaultTemplates={"coaddName": "deep"}):
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Dynamically add input and output connections for all possible maps in
    # the property-map registry.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_min",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_max",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Remove the input and output connections for those that are not
        # configured.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.inputs.remove(f"{name}_map_min")
                self.outputs.remove(f"{name}_consolidated_map_min")
            if not prop_config.do_max:
                self.inputs.remove(f"{name}_map_max")
                self.outputs.remove(f"{name}_consolidated_map_max")
            if not prop_config.do_mean:
                self.inputs.remove(f"{name}_map_mean")
                self.outputs.remove(f"{name}_consolidated_map_mean")
            if not prop_config.do_weighted_mean:
                self.inputs.remove(f"{name}_map_weighted_mean")
                self.outputs.remove(f"{name}_consolidated_map_weighted_mean")
            if not prop_config.do_sum:
                self.inputs.remove(f"{name}_map_sum")
                self.outputs.remove(f"{name}_consolidated_map_sum")
class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                             pipelineConnections=ConsolidateHealSparsePropertyMapConnections):
    """Configuration parameters for ConsolidateHealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )
    nside_coverage = pexConfig.Field(
        doc="Consolidated HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        default=32,
        check=_is_power_of_two,
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to consolidate HealSparse property maps.

    This task will take all the individual tract-based maps (per map type,
    per band) and consolidate them into one survey-wide map (per map type,
    per band).  Each tract map is truncated to its inner region before
    consolidation.
    """
    ConfigClass = ConsolidateHealSparsePropertyMapConfig
    _DefaultName = "consolidateHealSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        # These need to be consolidated one at a time to conserve memory.
        for name in self.config.property_maps.names:
            for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']:
                map_type = f"{name}_map_{type_}"
                if map_type in inputs:
                    input_refs = {ref.dataId['tract']: ref
                                  for ref in inputs[map_type]}
                    consolidated_map = self.consolidate_map(sky_map, input_refs)
                    butlerQC.put(consolidated_map,
                                 getattr(outputRefs, f"{name}_consolidated_map_{type_}"))
    def consolidate_map(self, sky_map, input_refs):
        """Consolidate the healsparse property maps.

        Parameters
        ----------
        sky_map : Sky map object
        input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of tract_id mapping to dataref.

        Returns
        -------
        consolidated_map : `healsparse.HealSparseMap`
            Consolidated HealSparse map.
        """
        # First, read in the coverage maps to know how much memory to
        # allocate.
        cov_mask = None
        nside_coverage_inputs = None
        for tract_id in input_refs:
            cov = input_refs[tract_id].get(component='coverage')
            if cov_mask is None:
                cov_mask = cov.coverage_mask
                nside_coverage_inputs = cov.nside_coverage
            else:
                cov_mask |= cov.coverage_mask

        cov_pix_inputs, = np.where(cov_mask)

        # Compute the coverage pixels for the desired nside_coverage.
        if nside_coverage_inputs == self.config.nside_coverage:
            cov_pix = cov_pix_inputs
        elif nside_coverage_inputs > self.config.nside_coverage:
            # Converting from a higher resolution coverage to a lower
            # resolution coverage.
            bit_shift = hsp.utils._compute_bitshift(self.config.nside_coverage,
                                                    nside_coverage_inputs)
            cov_pix = np.right_shift(cov_pix_inputs, bit_shift)
        else:
            # Converting from a lower resolution coverage to a higher
            # resolution coverage.
            bit_shift = hsp.utils._compute_bitshift(nside_coverage_inputs,
                                                    self.config.nside_coverage)
            cov_pix = np.left_shift(cov_pix_inputs, bit_shift)
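        # Illustration of the nested-scheme shift above (assuming healsparse's
        # _compute_bitshift returns 2*log2(nside ratio)): going from an input
        # coverage nside of 64 down to a target of 32 gives bit_shift = 2, and
        # a right shift by 2 maps each group of four child coverage pixels
        # onto their common parent pixel.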
        consolidated_map = None
        for tract_id in input_refs:
            input_map = input_refs[tract_id].get()
            if consolidated_map is None:
                consolidated_map = hsp.HealSparseMap.make_empty(
                    self.config.nside_coverage,
                    input_map.nside_sparse,
                    input_map.dtype,
                    sentinel=input_map._sentinel,
                    cov_pixels=cov_pix)

            # Only use the pixels that are properly inside the tract.
            vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
            vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True)

            in_tract = (vpix_tract_ids == tract_id)

            consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]]

        return consolidated_map