lsst.pipe.tasks  21.0.0-156-g24bfad28+d73ae808d5
healSparseMapping.py
#
# LSST Data Management System
# Copyright 2008-2021 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
from collections import defaultdict
import numbers
import numpy as np
import healpy as hp
import healsparse as hsp

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.geom
import lsst.afw.geom as afwGeom
import lsst.sphgeom
from lsst.daf.butler import Formatter
from lsst.skymap import BaseSkyMap
from lsst.utils.timer import timeMethod
from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig,
                                          PropertyMapMap, compute_approx_psf_size_and_shape)


__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig",
           "HealSparseMapFormatter", "HealSparsePropertyMapConnections",
           "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask",
           "ConsolidateHealSparsePropertyMapConnections",
           "ConsolidateHealSparsePropertyMapConfig",
           "ConsolidateHealSparsePropertyMapTask"]

class HealSparseMapFormatter(Formatter):
    """Interface for reading and writing healsparse.HealSparseMap files."""
    unsupportedParameters = frozenset()
    supportedExtensions = frozenset({".hsp", ".fit", ".fits"})
    extension = '.hsp'

    def read(self, component=None):
        # Docstring inherited from Formatter.read.
        path = self.fileDescriptor.location.path

        if component == 'coverage':
            try:
                data = hsp.HealSparseCoverage.read(path)
            except (OSError, RuntimeError):
                raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

            return data

        if self.fileDescriptor.parameters is None:
            pixels = None
            degrade_nside = None
        else:
            pixels = self.fileDescriptor.parameters.get('pixels', None)
            degrade_nside = self.fileDescriptor.parameters.get('degrade_nside', None)
        try:
            data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
        except (OSError, RuntimeError):
            raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

        return data

    def write(self, inMemoryDataset):
        # Docstring inherited from Formatter.write.
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True)


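# Illustrative sketch (not part of this module's API): reading a stored map
# subset directly with healsparse, in the same way the formatter does when the
# butler passes ``pixels``/``degrade_nside`` parameters.  The file path below
# is a placeholder assumption.
#
#     import numpy as np
#     import healsparse as hsp
#
#     path = "/path/to/deepCoadd_psf_size_map_weighted_mean.hsp"
#     cov = hsp.HealSparseCoverage.read(path)     # cheap: coverage info only
#     covered, = np.where(cov.coverage_mask)      # coverage pixels present
#     # Read only the first few coverage pixels instead of the full map.
#     partial_map = hsp.HealSparseMap.read(path, pixels=covered[:4])

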
def _is_power_of_two(value):
    """Check that value is a power of two.

    Parameters
    ----------
    value : `int`
        Value to check.

    Returns
    -------
    is_power_of_two : `bool`
        True if value is a power of two; False otherwise, or
        if value is not an integer.
    """
    if not isinstance(value, numbers.Integral):
        return False

    # See https://stackoverflow.com/questions/57025836
    # Every power of 2 has exactly 1 bit set to 1; subtracting
    # 1 therefore flips every preceding bit.  If you and that
    # together with the original value it must be 0.
    return (value & (value - 1) == 0) and value != 0


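# Worked example of the bit trick above (illustrative only):
#
#     _is_power_of_two(32768)   # True:  0b1000000000000000 & 0b0111111111111111 == 0
#     _is_power_of_two(96)      # False: 0b1100000 & 0b1011111 == 0b1000000 != 0
#     _is_power_of_two(0.5)     # False: not an integer

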
class HealSparseInputMapConfig(pexConfig.Config):
    """Configuration parameters for HealSparseInputMapTask"""
    nside = pexConfig.Field(
        doc="Mapping healpix nside.  Must be power of 2.",
        dtype=int,
        default=32768,
        check=_is_power_of_two,
    )
    nside_coverage = pexConfig.Field(
        doc="HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        default=256,
        check=_is_power_of_two,
    )
    bad_mask_min_coverage = pexConfig.Field(
        doc=("Minimum area fraction of a map healpixel that must be "
             "covered by bad pixels to be removed from the input map.  "
             "This is approximate."),
        dtype=float,
        default=0.5,
    )


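# Illustrative sketch: pex_config applies the ``check`` callable when a field
# is assigned, so a non-power-of-two nside should be rejected up front
# (behavior assumed from pex_config field validation):
#
#     config = HealSparseInputMapConfig()
#     config.nside = 16384      # accepted, power of two
#     config.nside = 12000      # raises lsst.pex.config.FieldValidationError

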
class HealSparseInputMapTask(pipeBase.Task):
    """Task for making a HealSparse input map."""

    ConfigClass = HealSparseInputMapConfig
    _DefaultName = "healSparseInputMap"

    def __init__(self, **kwargs):
        pipeBase.Task.__init__(self, **kwargs)

        self.ccd_input_map = None

    def build_ccd_input_map(self, bbox, wcs, ccds):
        """Build a map from ccd valid polygons or bounding boxes.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box for region to build input map.
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object for region to build input map.
        ccds : `lsst.afw.table.ExposureCatalog`
            Exposure catalog with ccd data from coadd inputs.
        """
        self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
                                                          nside_sparse=self.config.nside,
                                                          dtype=hsp.WIDE_MASK,
                                                          wide_mask_maxbits=len(ccds))
        self._wcs = wcs
        self._bbox = bbox
        self._ccds = ccds

        pixel_scale = wcs.getPixelScale().asArcseconds()
        hpix_area_arcsec2 = hp.nside2pixarea(self.config.nside, degrees=True)*(3600.**2.)
        self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)

        metadata = {}
        self._bits_per_visit_ccd = {}
        self._bits_per_visit = defaultdict(list)
        for bit, ccd_row in enumerate(ccds):
            metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
            metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
            metadata[f"B{bit:04d}WT"] = ccd_row["weight"]

            self._bits_per_visit_ccd[(ccd_row["visit"], ccd_row["ccd"])] = bit
            self._bits_per_visit[ccd_row["visit"]].append(bit)

            ccd_poly = ccd_row.getValidPolygon()
            if ccd_poly is None:
                ccd_poly = afwGeom.Polygon(lsst.geom.Box2D(ccd_row.getBBox()))
            # Detectors need to be rendered with their own wcs.
            ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())

            # Create a ccd healsparse polygon
            poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
                               dec=ccd_poly_radec[: -1, 1],
                               value=[bit])
            self.ccd_input_map.set_bits_pix(poly.get_pixels(nside=self.ccd_input_map.nside_sparse),
                                            [bit])

        # Cut down to the overall bounding box with associated wcs.
        bbox_afw_poly = afwGeom.Polygon(lsst.geom.Box2D(bbox))
        bbox_poly_radec = self._pixels_to_radec(self._wcs,
                                                bbox_afw_poly.convexHull().getVertices())
        bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
                                value=np.arange(self.ccd_input_map.wide_mask_maxbits))
        bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)
        self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map])
        self.ccd_input_map.metadata = metadata

        # Create a temporary map to hold the count of bad pixels in each healpix pixel
        self._ccd_input_pixels = self.ccd_input_map.valid_pixels

        dtype = [(f"v{visit}", "i4") for visit in self._bits_per_visit.keys()]

        cov = self.config.nside_coverage
        ns = self.config.nside
        self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty(nside_coverage=cov,
                                                                     nside_sparse=ns,
                                                                     dtype=dtype,
                                                                     primary=dtype[0][0])
        # Don't set input bad map if there are no ccds which overlap the bbox.
        if len(self._ccd_input_pixels) > 0:
            self._ccd_input_bad_count_map[self._ccd_input_pixels] = np.zeros(1, dtype=dtype)

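    # Illustrative sketch (assumes build_ccd_input_map has been run on a task
    # instance ``task``; ``ra``, ``dec``, ``visit``, and ``ccd`` are placeholder
    # values): each wide-mask bit records one input visit/detector, so
    # membership of a sky position can be tested against the map.
    #
    #     pix = hp.ang2pix(task.config.nside, ra, dec, lonlat=True, nest=True)
    #     bit = task._bits_per_visit_ccd[(visit, ccd)]
    #     used = task.ccd_input_map.check_bits_pix(np.atleast_1d(pix), [bit])
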
    def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value):
        """Mask a subregion from a visit.
        This must be run after build_ccd_input_map initializes
        the overall map.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box from region to mask.
        visit : `int`
            Visit number corresponding to warp with mask.
        mask : `lsst.afw.image.MaskX`
            Mask plane from warp exposure.
        bit_mask_value : `int`
            Bit mask to check for bad pixels.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox")

        # Find the bad pixels and convert to healpix
        bad_pixels = np.where(mask.array & bit_mask_value)
        if len(bad_pixels[0]) == 0:
            # No bad pixels
            return

        # Bad pixels come from warps which use the overall wcs.
        bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
                                                    bad_pixels[0].astype(np.float64),
                                                    degrees=True)
        bad_hpix = hp.ang2pix(self.config.nside, bad_ra, bad_dec,
                              lonlat=True, nest=True)

        # Count the number of bad image pixels in each healpix pixel
        min_bad_hpix = bad_hpix.min()
        bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
        np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)

        # Add these to the accumulator map.
        # We need to make sure that the "primary" array has valid values for
        # this pixel to be registered in the accumulator map.
        pix_to_add, = np.where(bad_hpix_count > 0)
        count_map_arr = self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add]
        primary = self._ccd_input_bad_count_map.primary
        count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)

        count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
        count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]

        self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add] = count_map_arr

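    # Worked example of the offset counting in mask_warp_bbox above
    # (illustrative only):
    #
    #     bad_hpix = np.array([7, 9, 7])
    #     min_bad_hpix = bad_hpix.min()                                  # 7
    #     counts = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
    #     np.add.at(counts, bad_hpix - min_bad_hpix, 1)                  # counts == [2, 0, 1]
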
271  """Use accumulated mask information to finalize the masking of
272  ccd_input_map.
273 
274  Raises
275  ------
276  RuntimeError : Raised if build_ccd_input_map was not run first.
277  """
278  if self.ccd_input_mapccd_input_map is None:
279  raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.")
280 
281  count_map_arr = self._ccd_input_bad_count_map_ccd_input_bad_count_map[self._ccd_input_pixels_ccd_input_pixels]
282  for visit in self._bits_per_visit_bits_per_visit:
283  to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad_min_bad)
284  if to_mask.size == 0:
285  continue
286  self.ccd_input_mapccd_input_map.clear_bits_pix(self._ccd_input_pixels_ccd_input_pixels[to_mask],
287  self._bits_per_visit_bits_per_visit[visit])
288 
289  # Clear memory
290  self._ccd_input_bad_count_map_ccd_input_bad_count_map = None
291 
    def _pixels_to_radec(self, wcs, pixels):
        """Convert pixels to ra/dec positions using a wcs.

        Parameters
        ----------
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object.
        pixels : `list` [`lsst.geom.Point2D`]
            List of pixels to convert.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions associated with pixels.
        """
        sph_pts = wcs.pixelToSky(pixels)
        return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
                         for sph in sph_pts])


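# Illustrative sketch of the intended call sequence for HealSparseInputMapTask
# (``bbox``, ``wcs``, ``ccds``, ``warps``, and ``bad_bit_mask`` are placeholders
# supplied by the coaddition code that drives this task):
#
#     task = HealSparseInputMapTask()
#     task.build_ccd_input_map(bbox, wcs, ccds)
#     for visit, warp in warps.items():
#         task.mask_warp_bbox(warp.getBBox(), visit, warp.mask, bad_bit_mask)
#     task.finalize_ccd_input_map_mask()
#     input_map = task.ccd_input_map   # wide-mask HealSparseMap, one bit per input ccd

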
class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                       dimensions=("tract", "band", "skymap",),
                                       defaultTemplates={"coaddName": "deep"}):
    input_maps = pipeBase.connectionTypes.Input(
        doc="Healsparse bit-wise coadd input maps",
        name="{coaddName}Coadd_inputMap",
        storageClass="HealSparseMap",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    coadd_exposures = pipeBase.connectionTypes.Input(
        doc="Coadded exposures associated with input_maps",
        name="{coaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    visit_summaries = pipeBase.connectionTypes.Input(
        doc="Visit summary tables with aggregated statistics",
        name="visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create output connections for all possible maps defined in the
    # registry.  The vars() trick used here allows us to set class attributes
    # programmatically.  Taken from
    # https://stackoverflow.com/questions/2519807/
    # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Here we remove the unused connections.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.outputs.remove(f"{name}_map_min")
            if not prop_config.do_max:
                self.outputs.remove(f"{name}_map_max")
            if not prop_config.do_mean:
                self.outputs.remove(f"{name}_map_mean")
            if not prop_config.do_weighted_mean:
                self.outputs.remove(f"{name}_map_weighted_mean")
            if not prop_config.do_sum:
                self.outputs.remove(f"{name}_map_sum")


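# For example, with the default ``coaddName`` template of "deep" and the
# ``psf_size`` property configured with ``do_weighted_mean``, the
# connection-creating loop in HealSparsePropertyMapConnections above produces
# an output connection attribute named ``psf_size_map_weighted_mean`` whose
# butler dataset type is ``deepCoadd_psf_size_map_weighted_mean`` with
# dimensions ``(tract, skymap, band)``.

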
class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                  pipelineConnections=HealSparsePropertyMapConnections):
    """Configuration parameters for HealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True


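# Illustrative config-override sketch (assumes ``config`` is an instance of
# HealSparsePropertyMapConfig, e.g. in a pipeline config override file):
# additional statistics can be switched on per property, and the matching
# output connections are created automatically by the connections class above.
#
#     config.property_maps["psf_size"].do_min = True
#     config.property_maps["psf_size"].do_max = True

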
class HealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to compute Healsparse property maps.

    This task will compute individual property maps (per tract, per
    map type, per band).  These maps cover the full coadd tract, and
    are not truncated to the inner tract region.
    """
    ConfigClass = HealSparsePropertyMapConfig
    _DefaultName = "healSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        tract = butlerQC.quantum.dataId["tract"]
        band = butlerQC.quantum.dataId["band"]

        input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
        coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}

        visit_summary_dict = {ref.dataId["visit"]: ref.get()
                              for ref in inputs["visit_summaries"]}

        self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)

        # Write the outputs
        for name, property_map in self.property_maps.items():
            if property_map.config.do_min:
                butlerQC.put(property_map.min_map,
                             getattr(outputRefs, f"{name}_map_min"))
            if property_map.config.do_max:
                butlerQC.put(property_map.max_map,
                             getattr(outputRefs, f"{name}_map_max"))
            if property_map.config.do_mean:
                butlerQC.put(property_map.mean_map,
                             getattr(outputRefs, f"{name}_map_mean"))
            if property_map.config.do_weighted_mean:
                butlerQC.put(property_map.weighted_mean_map,
                             getattr(outputRefs, f"{name}_map_weighted_mean"))
            if property_map.config.do_sum:
                butlerQC.put(property_map.sum_map,
                             getattr(outputRefs, f"{name}_map_sum"))

    def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
        """Run the healsparse property task.

        Parameters
        ----------
        sky_map : Sky map object
        tract : `int`
            Tract number.
        band : `str`
            Band name for logging.
        coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of coadd exposure references.  Keys are patch numbers.
        input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of input map references.  Keys are patch numbers.
        visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
            Dictionary of visit summary tables.  Keys are visit numbers.

        Raises
        ------
        RepeatableQuantumError
            If visit_summary_dict is missing any visits or detectors found in an
            input map.  This leads to an inconsistency between what is in the coadd
            (via the input map) and the visit summary tables which contain data
            to compute the maps.
        """
        tract_info = sky_map[tract]

        tract_maps_initialized = False

        for patch in input_map_dict.keys():
            self.log.info("Making maps for band %s, tract %d, patch %d.",
                          band, tract, patch)

            patch_info = tract_info[patch]

            input_map = input_map_dict[patch].get()
            coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
            coadd_inputs = coadd_dict[patch].get(component="coaddInputs")

            coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())

            # Crop input_map to the inner polygon of the patch
            poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
            patch_radec = self._vertices_to_radec(poly_vertices)
            patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
                                     value=np.arange(input_map.wide_mask_maxbits))
            patch_poly_map = patch_poly.get_map_like(input_map)
            input_map = hsp.and_intersection([input_map, patch_poly_map])

            if not tract_maps_initialized:
                # We use the first input map nside information to initialize
                # the tract maps
                nside_coverage = self._compute_nside_coverage_tract(tract_info)
                nside = input_map.nside_sparse

                do_compute_approx_psf = False
                # Initialize the tract maps
                for property_map in self.property_maps:
                    property_map.initialize_tract_maps(nside_coverage, nside)
                    if property_map.requires_psf:
                        do_compute_approx_psf = True

                tract_maps_initialized = True

            valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)

            # Check if there are no valid pixels for the inner (unique) patch region
            if valid_pixels.size == 0:
                continue

            # Initialize the value accumulators
            for property_map in self.property_maps:
                property_map.initialize_values(valid_pixels.size)
                property_map.zeropoint = coadd_zeropoint

            # Initialize the weight and counter accumulators
            total_weights = np.zeros(valid_pixels.size)
            total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)

            for bit, ccd_row in enumerate(coadd_inputs.ccds):
                # Which pixels in the map are used by this visit/detector
                inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))

                # Check if there are any valid pixels in the map from this detector.
                if inmap.size == 0:
                    continue

                # visit, detector_id, weight = input_dict[bit]
                visit = ccd_row["visit"]
                detector_id = ccd_row["ccd"]
                weight = ccd_row["weight"]

                x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
                scalings = self._compute_calib_scale(ccd_row, x, y)

                if do_compute_approx_psf:
                    psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap])
                else:
                    psf_array = None

                total_weights[inmap] += weight
                total_inputs[inmap] += 1

                # Retrieve the correct visitSummary row
                if visit not in visit_summary_dict:
                    msg = f"Visit {visit} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)
                row = visit_summary_dict[visit].find(detector_id)
                if row is None:
                    msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)

                # Accumulate the values
                for property_map in self.property_maps:
                    property_map.accumulate_values(inmap,
                                                   vpix_ra[inmap],
                                                   vpix_dec[inmap],
                                                   weight,
                                                   scalings,
                                                   row,
                                                   psf_array=psf_array)

            # Finalize the mean values and set the tract maps
            for property_map in self.property_maps:
                property_map.finalize_mean_values(total_weights, total_inputs)
                property_map.set_map_values(valid_pixels)

    def _compute_calib_scale(self, ccd_row, x, y):
        """Compute calibration scaling values.

        Parameters
        ----------
        ccd_row : `lsst.afw.table.ExposureRecord`
            Exposure metadata for a given detector exposure.
        x : `np.ndarray`
            Array of x positions.
        y : `np.ndarray`
            Array of y positions.

        Returns
        -------
        calib_scale : `np.ndarray`
            Array of calibration scale values.
        """
        photo_calib = ccd_row.getPhotoCalib()
        bf = photo_calib.computeScaledCalibration()
        if bf.getBBox() == ccd_row.getBBox():
            # Track variable calibration over the detector
            calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
        else:
            # Spatially constant calibration
            calib_scale = photo_calib.getCalibrationMean()

        return calib_scale

    def _vertices_to_radec(self, vertices):
        """Convert polygon vertices to ra/dec.

        Parameters
        ----------
        vertices : `list` [ `lsst.sphgeom.UnitVector3d` ]
            Vertices for bounding polygon.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions (in degrees) associated with vertices.
        """
        lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
        radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
                          x in lonlats])
        return radec

    def _compute_nside_coverage_tract(self, tract_info):
        """Compute the optimal coverage nside for a tract.

        Parameters
        ----------
        tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo`
            Tract information object.

        Returns
        -------
        nside_coverage : `int`
            Optimal coverage nside for a tract map.
        """
        num_patches = tract_info.getNumPatches()

        # Compute approximate patch area
        patch_info = tract_info.getPatchInfo(0)
        vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
        radec = self._vertices_to_radec(vertices)
        delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
        delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
        patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))

        tract_area = num_patches[0]*num_patches[1]*patch_area
        # Start with a fairly low nside and increase until we find the approximate area.
        nside_coverage_tract = 32
        while hp.nside2pixarea(nside_coverage_tract, degrees=True) > tract_area:
            nside_coverage_tract = 2*nside_coverage_tract
        # Step back one, but don't go bigger pixels than nside=32
        nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, None))

        return nside_coverage_tract


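# Worked example of the coverage-nside search in _compute_nside_coverage_tract
# above (the tract area is a hypothetical number for illustration): for a
# tract area of 0.5 deg**2 the loop visits nside = 32, 64, 128 (healpix pixel
# areas of roughly 3.36, 0.84, and 0.21 deg**2), exits once the pixel area
# drops below the tract area, and the step back returns a coverage nside of 64.
#
#     import healpy as hp
#     [round(hp.nside2pixarea(n, degrees=True), 2) for n in (32, 64, 128)]
#     # -> [3.36, 0.84, 0.21]

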
class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                                  dimensions=("band", "skymap",),
                                                  defaultTemplates={"coaddName": "deep"}):
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create input/output connections for all possible maps defined in the
    # registry.  The vars() trick used here allows us to set class attributes
    # programmatically.  Taken from
    # https://stackoverflow.com/questions/2519807/
    # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_min",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_max",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Here we remove the unused connections.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.inputs.remove(f"{name}_map_min")
                self.outputs.remove(f"{name}_consolidated_map_min")
            if not prop_config.do_max:
                self.inputs.remove(f"{name}_map_max")
                self.outputs.remove(f"{name}_consolidated_map_max")
            if not prop_config.do_mean:
                self.inputs.remove(f"{name}_map_mean")
                self.outputs.remove(f"{name}_consolidated_map_mean")
            if not prop_config.do_weighted_mean:
                self.inputs.remove(f"{name}_map_weighted_mean")
                self.outputs.remove(f"{name}_consolidated_map_weighted_mean")
            if not prop_config.do_sum:
                self.inputs.remove(f"{name}_map_sum")
                self.outputs.remove(f"{name}_consolidated_map_sum")


class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                             pipelineConnections=ConsolidateHealSparsePropertyMapConnections):
    """Configuration parameters for ConsolidateHealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True


class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to consolidate HealSparse property maps.

    This task will take all the individual tract-based maps (per map type,
    per band) and consolidate them into one survey-wide map (per map type,
    per band).  Each tract map is truncated to its inner region before
    consolidation.
    """
    ConfigClass = ConsolidateHealSparsePropertyMapConfig
    _DefaultName = "consolidateHealSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        # These need to be consolidated one at a time to conserve memory.
        for name in self.config.property_maps.names:
            for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']:
                map_type = f"{name}_map_{type_}"
                if map_type in inputs:
                    input_refs = {ref.dataId['tract']: ref
                                  for ref in inputs[map_type]}
                    consolidated_map = self.consolidate_map(sky_map, input_refs)
                    butlerQC.put(consolidated_map,
                                 getattr(outputRefs, f"{name}_consolidated_map_{type_}"))

    def consolidate_map(self, sky_map, input_refs):
        """Consolidate the healsparse property maps.

        Parameters
        ----------
        sky_map : Sky map object
        input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of tract_id mapping to dataref.

        Returns
        -------
        consolidated_map : `healsparse.HealSparseMap`
            Consolidated HealSparse map.
        """
        # First, we read in the coverage maps to know how much memory
        # to allocate
        cov_mask = None
        for tract_id in input_refs:
            cov = input_refs[tract_id].get(component='coverage')
            if cov_mask is None:
                cov_mask = cov.coverage_mask
            else:
                cov_mask |= cov.coverage_mask

        cov_pix, = np.where(cov_mask)

        # Now read in each tract map and build the consolidated map.
        consolidated_map = None
        for tract_id in input_refs:
            input_map = input_refs[tract_id].get()
            if consolidated_map is None:
                dtype = input_map.dtype
                sentinel = input_map._sentinel
                nside_coverage = input_map.nside_coverage
                nside_sparse = input_map.nside_sparse
                consolidated_map = hsp.HealSparseMap.make_empty(nside_coverage,
                                                                nside_sparse,
                                                                dtype,
                                                                sentinel=sentinel)
                consolidated_map._reserve_cov_pix(cov_pix)

            # Only use pixels that are properly inside the tract.
            vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
            vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True)

            in_tract = (vpix_tract_ids == tract_id)

            consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]]

        return consolidated_map
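

# Illustrative sketch (not part of this module): once a consolidated map has
# been retrieved from the butler, values can be looked up by sky position with
# healsparse.  The butler collections, dataId values, and ``ra``/``dec`` arrays
# below are placeholder assumptions.
#
#     consolidated = butler.get("deepCoadd_psf_size_consolidated_map_weighted_mean",
#                               skymap="mySkyMap", band="i")
#     values = consolidated.get_values_pos(ra, dec, lonlat=True)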