Coverage for python/lsst/pipe/tasks/healSparseMapping.py: 20%

380 statements  

1# 

2# LSST Data Management System 

3# Copyright 2008-2021 AURA/LSST. 

4# 

5# This product includes software developed by the 

6# LSST Project (http://www.lsst.org/). 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <http://www.lsstcorp.org/LegalNotices/>. 

21# 

22from collections import defaultdict 

23import warnings 

24import numbers 

25import numpy as np 

26import healpy as hp 

27import healsparse as hsp 

28 

29import lsst.pex.config as pexConfig 

30import lsst.pipe.base as pipeBase 

31import lsst.geom 
import lsst.sphgeom 

32import lsst.afw.geom as afwGeom 

33from lsst.daf.butler import Formatter 

34from lsst.skymap import BaseSkyMap 

35from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig, 

36 PropertyMapMap, compute_approx_psf_size_and_shape) 

37 

38 

39__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig", 

40 "HealSparseMapFormatter", "HealSparsePropertyMapConnections", 

41 "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask", 

42 "ConsolidateHealSparsePropertyMapConnections", 

43 "ConsolidateHealSparsePropertyMapConfig", 

44 "ConsolidateHealSparsePropertyMapTask"] 

45 

46 

47class HealSparseMapFormatter(Formatter): 

48 """Interface for reading and writing healsparse.HealSparseMap files.""" 

49 unsupportedParameters = frozenset() 

50 supportedExtensions = frozenset({".hsp", ".fit", ".fits"}) 

51 extension = '.hsp' 

52 

53 def read(self, component=None): 

54 # Docstring inherited from Formatter.read. 

55 path = self.fileDescriptor.location.path 

56 

57 if component == 'coverage': 

58 try: 

59 data = hsp.HealSparseCoverage.read(path) 

60 except (OSError, RuntimeError): 

61 raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}") 

62 

63 return data 

64 

65 if self.fileDescriptor.parameters is None: 

66 pixels = None 

67 degrade_nside = None 

68 else: 

69 pixels = self.fileDescriptor.parameters.get('pixels', None) 

70 degrade_nside = self.fileDescriptor.parameters.get('degrade_nside', None) 

71 try: 

72 data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside) 

73 except (OSError, RuntimeError): 

74 raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}") 

75 

76 return data 

77 

78 def write(self, inMemoryDataset): 

79 # Docstring inherited from Formatter.write. 

80 # Update the location with the formatter-preferred file extension 

81 self.fileDescriptor.location.updateExtension(self.extension) 

82 inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True) 

83 

84 
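# Illustrative sketch (not part of the module): the read() method above simply
# forwards the optional "pixels" and "degrade_nside" butler parameters to
# healsparse, which supports partial and degraded reads directly. The file
# path "example.hsp" below is a hypothetical placeholder.
def _example_partial_read(path="example.hsp"):
    # Read the coverage alone (cheap; no sparse data is loaded).
    cov = hsp.HealSparseCoverage.read(path)
    # Read only a few coverage pixels, degraded to a coarser resolution.
    m = hsp.HealSparseMap.read(path, pixels=[100, 101, 102], degrade_nside=1024)
    return cov, m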

85def _is_power_of_two(value): 

86 """Check that value is a power of two. 

87 

88 Parameters 

89 ---------- 

90 value : `int` 

91 Value to check. 

92 

93 Returns 

94 ------- 

95 is_power_of_two : `bool` 

96 True if value is a power of two; False otherwise, or 

97 if value is not an integer. 

98 """ 

99 if not isinstance(value, numbers.Integral): 99 ↛ 100: line 99 didn't jump to line 100, because the condition on line 99 was never true 

100 return False 

101 

102 # See https://stackoverflow.com/questions/57025836 

103 # Every power of 2 has exactly 1 bit set to 1; subtracting 

104 # 1 therefore flips every preceding bit. If you and that 

105 # together with the original value it must be 0. 

106 return (value & (value - 1) == 0) and value != 0 

107 
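# Illustrative sketch: the bit trick above in action. Every positive power of
# two has exactly one set bit, so value & (value - 1) clears it and leaves 0.
def _example_power_of_two_check():
    assert _is_power_of_two(1)          # 0b1
    assert _is_power_of_two(32768)      # a single set bit
    assert not _is_power_of_two(0)      # explicitly excluded
    assert not _is_power_of_two(36864)  # 32768 + 4096 has two set bits
    assert not _is_power_of_two(2.0)    # non-integers are rejected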

108 

109class HealSparseInputMapConfig(pexConfig.Config): 

110 """Configuration parameters for HealSparseInputMapTask""" 

111 nside = pexConfig.Field( 

112 doc="Mapping healpix nside. Must be power of 2.", 

113 dtype=int, 

114 default=32768, 

115 check=_is_power_of_two, 

116 ) 

117 nside_coverage = pexConfig.Field( 

118 doc="HealSparse coverage map nside. Must be power of 2.", 

119 dtype=int, 

120 default=256, 

121 check=_is_power_of_two, 

122 ) 

123 bad_mask_min_coverage = pexConfig.Field( 

124 doc=("Minimum area fraction of a map healpixel pixel that must be " 

125 "covered by bad pixels to be removed from the input map. " 

126 "This is approximate."), 

127 dtype=float, 

128 default=0.5, 

129 ) 

130 
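# Illustrative sketch (standalone, not part of the task): overriding the
# input-map configuration defined above. Both nside fields use
# check=_is_power_of_two, so only powers of two pass validation.
def _example_input_map_config():
    config = HealSparseInputMapConfig()
    config.nside = 16384            # valid: power of two
    config.nside_coverage = 128     # valid: power of two
    config.bad_mask_min_coverage = 0.25
    config.validate()               # raises if any check fails
    return config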

131 

132class HealSparseInputMapTask(pipeBase.Task): 

133 """Task for making a HealSparse input map.""" 

134 

135 ConfigClass = HealSparseInputMapConfig 

136 _DefaultName = "healSparseInputMap" 

137 

138 def __init__(self, **kwargs): 

139 pipeBase.Task.__init__(self, **kwargs) 

140 

141 self.ccd_input_map = None 

142 

143 def build_ccd_input_map(self, bbox, wcs, ccds): 

144 """Build a map from ccd valid polygons or bounding boxes. 

145 

146 Parameters 

147 ---------- 

148 bbox : `lsst.geom.Box2I` 

149 Bounding box for region to build input map. 

150 wcs : `lsst.afw.geom.SkyWcs` 

151 WCS object for region to build input map. 

152 ccds : `lsst.afw.table.ExposureCatalog` 

153 Exposure catalog with ccd data from coadd inputs. 

154 """ 

155 with warnings.catch_warnings(): 

156 # Healsparse will emit a warning if nside coverage is greater than 

157 # 128. In the case of generating patch input maps, and not global 

158 # maps, high nside coverage works fine, so we can suppress this 

159 # warning. 

160 warnings.simplefilter("ignore") 

161 self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage, 

162 nside_sparse=self.config.nside, 

163 dtype=hsp.WIDE_MASK, 

164 wide_mask_maxbits=len(ccds)) 

165 self._wcs = wcs 

166 self._bbox = bbox 

167 self._ccds = ccds 

168 

169 pixel_scale = wcs.getPixelScale().asArcseconds() 

170 hpix_area_arcsec2 = hp.nside2pixarea(self.config.nside, degrees=True)*(3600.**2.) 

171 self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.) 

172 

173 metadata = {} 

174 self._bits_per_visit_ccd = {} 

175 self._bits_per_visit = defaultdict(list) 

176 for bit, ccd_row in enumerate(ccds): 

177 metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"] 

178 metadata[f"B{bit:04d}VIS"] = ccd_row["visit"] 

179 metadata[f"B{bit:04d}WT"] = ccd_row["weight"] 

180 

181 self._bits_per_visit_ccd[(ccd_row["visit"], ccd_row["ccd"])] = bit 

182 self._bits_per_visit[ccd_row["visit"]].append(bit) 

183 

184 ccd_poly = ccd_row.getValidPolygon() 

185 if ccd_poly is None: 

186 ccd_poly = afwGeom.Polygon(lsst.geom.Box2D(ccd_row.getBBox())) 

187 # Detectors need to be rendered with their own wcs. 

188 ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices()) 

189 

190 # Create a ccd healsparse polygon 

191 poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0], 

192 dec=ccd_poly_radec[: -1, 1], 

193 value=[bit]) 

194 self.ccd_input_map.set_bits_pix(poly.get_pixels(nside=self.ccd_input_map.nside_sparse), 

195 [bit]) 

196 

197 # Cut down to the overall bounding box with associated wcs. 

198 bbox_afw_poly = afwGeom.Polygon(lsst.geom.Box2D(bbox)) 

199 bbox_poly_radec = self._pixels_to_radec(self._wcs, 

200 bbox_afw_poly.convexHull().getVertices()) 

201 bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1], 

202 value=np.arange(self.ccd_input_map.wide_mask_maxbits)) 

203 bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map) 

204 self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map]) 

205 self.ccd_input_map.metadata = metadata 

206 

207 # Create a temporary map to hold the count of bad pixels in each healpix pixel 

208 self._ccd_input_pixels = self.ccd_input_map.valid_pixels 

209 

210 dtype = [(f"v{visit}", "i4") for visit in self._bits_per_visit.keys()] 

211 

212 with warnings.catch_warnings(): 

213 # Healsparse will emit a warning if nside coverage is greater than 

214 # 128. In the case of generating patch input maps, and not global 

215 # maps, high nside coverage works fine, so we can suppress this 

216 # warning. 

217 warnings.simplefilter("ignore") 

218 self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty( 

219 nside_coverage=self.config.nside_coverage, 

220 nside_sparse=self.config.nside, 

221 dtype=dtype, 

222 primary=dtype[0][0]) 

223 

224 # Don't set input bad map if there are no ccds which overlap the bbox. 

225 if len(self._ccd_input_pixels) > 0: 

226 self._ccd_input_bad_count_map[self._ccd_input_pixels] = np.zeros(1, dtype=dtype) 

227 
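# Illustrative sketch (standalone, small nside values; not part of the task):
# how the wide mask above is used. One bit is reserved per input
# detector/visit, set over the pixels that detector covers, and later queried
# or cleared. Uses the hsp and np modules imported at the top of this file.
def _example_wide_mask():
    m = hsp.HealSparseMap.make_empty(nside_coverage=32,
                                     nside_sparse=4096,
                                     dtype=hsp.WIDE_MASK,
                                     wide_mask_maxbits=4)
    pixels = np.arange(1000, 1010)
    m.set_bits_pix(pixels, [0])        # mark bit 0 (first input) over all pixels
    m.set_bits_pix(pixels[:5], [2])    # mark bit 2 (third input) over a subset
    covered_by_0 = m.check_bits_pix(pixels, [0])   # all True
    covered_by_2 = m.check_bits_pix(pixels, [2])   # True for the first five only
    return covered_by_0, covered_by_2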

228 def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value): 

229 """Mask a subregion from a visit. 

230 This must be run after build_ccd_input_map initializes 

231 the overall map. 

232 

233 Parameters 

234 ---------- 

235 bbox : `lsst.geom.Box2I` 

236 Bounding box from region to mask. 

237 visit : `int` 

238 Visit number corresponding to warp with mask. 

239 mask : `lsst.afw.image.MaskX` 

240 Mask plane from warp exposure. 

241 bit_mask_value : `int` 

242 Bit mask to check for bad pixels. 

243 

244 Raises 

245 ------ 

246 RuntimeError : Raised if build_ccd_input_map was not run first. 

247 """ 

248 if self.ccd_input_map is None: 

249 raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox") 

250 

251 # Find the bad pixels and convert to healpix 

252 bad_pixels = np.where(mask.array & bit_mask_value) 

253 if len(bad_pixels[0]) == 0: 

254 # No bad pixels 

255 return 

256 

257 # Bad pixels come from warps which use the overall wcs. 

258 bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64), 

259 bad_pixels[0].astype(np.float64), 

260 degrees=True) 

261 bad_hpix = hp.ang2pix(self.config.nside, bad_ra, bad_dec, 

262 lonlat=True, nest=True) 

263 

264 # Count the number of bad image pixels in each healpix pixel 

265 min_bad_hpix = bad_hpix.min() 

266 bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32) 

267 np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1) 

268 

269 # Add these to the accumulator map. 

270 # We need to make sure that the "primary" array has valid values for 

271 # this pixel to be registered in the accumulator map. 

272 pix_to_add, = np.where(bad_hpix_count > 0) 

273 count_map_arr = self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add] 

274 primary = self._ccd_input_bad_count_map.primary 

275 count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None) 

276 

277 count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None) 

278 count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add] 

279 

280 self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add] = count_map_arr 

281 
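# Illustrative sketch (standalone, not part of the task): the compact
# histogram trick used in mask_warp_bbox above. Bad pixels are counted per
# healpix pixel with np.add.at over an array that only spans
# min(bad_hpix)..max(bad_hpix), avoiding a full-sky allocation.
def _example_bad_pixel_count():
    bad_hpix = np.array([500, 500, 501, 503, 503, 503])
    min_bad_hpix = bad_hpix.min()
    counts = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
    np.add.at(counts, bad_hpix - min_bad_hpix, 1)
    # counts == [2, 1, 0, 3] for healpix pixels 500, 501, 502, 503
    return min_bad_hpix, counts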

282 def finalize_ccd_input_map_mask(self): 

283 """Use accumulated mask information to finalize the masking of 

284 ccd_input_map. 

285 

286 Raises 

287 ------ 

288 RuntimeError : Raised if build_ccd_input_map was not run first. 

289 """ 

290 if self.ccd_input_map is None: 

291 raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.") 

292 

293 count_map_arr = self._ccd_input_bad_count_map[self._ccd_input_pixels] 

294 for visit in self._bits_per_visit: 

295 to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad) 

296 if to_mask.size == 0: 

297 continue 

298 self.ccd_input_map.clear_bits_pix(self._ccd_input_pixels[to_mask], 

299 self._bits_per_visit[visit]) 

300 

301 # Clear memory 

302 self._ccd_input_bad_count_map = None 

303 
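# Illustrative sketch (standalone, hypothetical numbers): the threshold used
# in finalize_ccd_input_map_mask above. A visit's bits are cleared for any
# healpix pixel whose accumulated bad-pixel count exceeds the configured
# minimum (self._min_bad).
def _example_finalize_threshold():
    counts = np.array([0, 3, 12, 7])   # bad-pixel counts for 4 valid pixels
    min_bad = 6.5                      # threshold derived from bad_mask_min_coverage
    to_mask, = np.where(counts > min_bad)
    # to_mask == [2, 3]; those pixels would have this visit's bits cleared
    return to_mask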

304 def _pixels_to_radec(self, wcs, pixels): 

305 """Convert pixels to ra/dec positions using a wcs. 

306 

307 Parameters 

308 ---------- 

309 wcs : `lsst.afw.geom.SkyWcs` 

310 WCS object. 

311 pixels : `list` [`lsst.geom.Point2D`] 

312 List of pixels to convert. 

313 

314 Returns 

315 ------- 

316 radec : `numpy.ndarray` 

317 Nx2 array of ra/dec positions associated with pixels. 

318 """ 

319 sph_pts = wcs.pixelToSky(pixels) 

320 return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees()) 

321 for sph in sph_pts]) 

322 
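# Illustrative sketch (standalone, not part of the task; assumes a simple TAN
# WCS built with the afw helpers makeSkyWcs/makeCdMatrix): what
# _pixels_to_radec does with a list of pixel corners.
def _example_pixels_to_radec():
    wcs = afwGeom.makeSkyWcs(
        crpix=lsst.geom.Point2D(2000.0, 2000.0),
        crval=lsst.geom.SpherePoint(30.0*lsst.geom.degrees, -10.0*lsst.geom.degrees),
        cdMatrix=afwGeom.makeCdMatrix(scale=0.2*lsst.geom.arcseconds),
    )
    box = lsst.geom.Box2D(lsst.geom.Box2I(lsst.geom.Point2I(0, 0),
                                          lsst.geom.Extent2I(4000, 4000)))
    sph_pts = wcs.pixelToSky(box.getCorners())
    radec = np.array([(p.getRa().asDegrees(), p.getDec().asDegrees()) for p in sph_pts])
    return radec  # shape (4, 2) array of ra/dec in degrees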

323 

324class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections, 

325 dimensions=("tract", "band", "skymap",), 

326 defaultTemplates={"coaddName": "deep"}): 

327 input_maps = pipeBase.connectionTypes.Input( 

328 doc="Healsparse bit-wise coadd input maps", 

329 name="{coaddName}Coadd_inputMap", 

330 storageClass="HealSparseMap", 

331 dimensions=("tract", "patch", "skymap", "band"), 

332 multiple=True, 

333 deferLoad=True, 

334 ) 

335 coadd_exposures = pipeBase.connectionTypes.Input( 

336 doc="Coadded exposures associated with input_maps", 

337 name="{coaddName}Coadd", 

338 storageClass="ExposureF", 

339 dimensions=("tract", "patch", "skymap", "band"), 

340 multiple=True, 

341 deferLoad=True, 

342 ) 

343 visit_summaries = pipeBase.connectionTypes.Input( 

344 doc="Visit summary tables with aggregated statistics", 

345 name="visitSummary", 

346 storageClass="ExposureCatalog", 

347 dimensions=("instrument", "visit"), 

348 multiple=True, 

349 deferLoad=True, 

350 ) 

351 sky_map = pipeBase.connectionTypes.Input( 

352 doc="Input definition of geometry/bbox and projection/wcs for coadded exposures", 

353 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME, 

354 storageClass="SkyMap", 

355 dimensions=("skymap",), 

356 ) 

357 

358 # Create output connections for all possible maps defined in the 

359 # registry. The vars() trick used here allows us to set class attributes 

360 # programmatically. Taken from 

361 # https://stackoverflow.com/questions/2519807/ 

362 # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class 

363 for name in BasePropertyMap.registry: 

364 vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output( 

365 doc=f"Minimum-value map of {name}", 

366 name=f"{{coaddName}}Coadd_{name}_map_min", 

367 storageClass="HealSparseMap", 

368 dimensions=("tract", "skymap", "band"), 

369 ) 

370 vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output( 

371 doc=f"Maximum-value map of {name}", 

372 name=f"{{coaddName}}Coadd_{name}_map_max", 

373 storageClass="HealSparseMap", 

374 dimensions=("tract", "skymap", "band"), 

375 ) 

376 vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output( 

377 doc=f"Mean-value map of {name}", 

378 name=f"{{coaddName}}Coadd_{name}_map_mean", 

379 storageClass="HealSparseMap", 

380 dimensions=("tract", "skymap", "band"), 

381 ) 

382 vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output( 

383 doc=f"Weighted mean-value map of {name}", 

384 name=f"{{coaddName}}Coadd_{name}_map_weighted_mean", 

385 storageClass="HealSparseMap", 

386 dimensions=("tract", "skymap", "band"), 

387 ) 

388 vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output( 

389 doc=f"Sum-value map of {name}", 

390 name=f"{{coaddName}}Coadd_{name}_map_sum", 

391 storageClass="HealSparseMap", 

392 dimensions=("tract", "skymap", "band"), 

393 ) 

394 
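# Illustrative sketch (standalone, hypothetical names): the vars() idiom used
# above, in isolation. Inside a class body, vars() returns the namespace being
# built, so attributes can be created programmatically from a list of names.
def _example_vars_trick():
    class Example:
        for _name in ("alpha", "beta"):
            vars()[f"{_name}_doc"] = f"Connection for {_name}"
        del _name
    return Example.alpha_doc, Example.beta_doc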

395 def __init__(self, *, config=None): 

396 super().__init__(config=config) 

397 

398 # Not all possible maps in the registry will be configured to run. 

399 # Here we remove the unused connections. 

400 for name in BasePropertyMap.registry: 

401 if name not in config.property_maps: 

402 prop_config = BasePropertyMapConfig() 

403 prop_config.do_min = False 

404 prop_config.do_max = False 

405 prop_config.do_mean = False 

406 prop_config.do_weighted_mean = False 

407 prop_config.do_sum = False 

408 else: 

409 prop_config = config.property_maps[name] 

410 

411 if not prop_config.do_min: 

412 self.outputs.remove(f"{name}_map_min") 

413 if not prop_config.do_max: 

414 self.outputs.remove(f"{name}_map_max") 

415 if not prop_config.do_mean: 

416 self.outputs.remove(f"{name}_map_mean") 

417 if not prop_config.do_weighted_mean: 

418 self.outputs.remove(f"{name}_map_weighted_mean") 

419 if not prop_config.do_sum: 

420 self.outputs.remove(f"{name}_map_sum") 

421 

422 

423class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig, 

424 pipelineConnections=HealSparsePropertyMapConnections): 

425 """Configuration parameters for HealSparsePropertyMapTask""" 

426 property_maps = BasePropertyMap.registry.makeField( 

427 multi=True, 

428 default=["exposure_time", 

429 "psf_size", 

430 "psf_e1", 

431 "psf_e2", 

432 "psf_maglim", 

433 "sky_noise", 

434 "sky_background", 

435 "dcr_dra", 

436 "dcr_ddec", 

437 "dcr_e1", 

438 "dcr_e2"], 

439 doc="Property map computation objects", 

440 ) 

441 

442 def setDefaults(self): 

443 self.property_maps["exposure_time"].do_sum = True 

444 self.property_maps["psf_size"].do_weighted_mean = True 

445 self.property_maps["psf_e1"].do_weighted_mean = True 

446 self.property_maps["psf_e2"].do_weighted_mean = True 

447 self.property_maps["psf_maglim"].do_weighted_mean = True 

448 self.property_maps["sky_noise"].do_weighted_mean = True 

449 self.property_maps["sky_background"].do_weighted_mean = True 

450 self.property_maps["dcr_dra"].do_weighted_mean = True 

451 self.property_maps["dcr_ddec"].do_weighted_mean = True 

452 self.property_maps["dcr_e1"].do_weighted_mean = True 

453 self.property_maps["dcr_e2"].do_weighted_mean = True 

454 
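# Illustrative sketch (standalone, hypothetical overrides): the registry field
# accepts per-map settings on top of the defaults set above, e.g. in a
# pipetask config override applied to a HealSparsePropertyMapConfig instance.
def _example_property_map_overrides(config):
    # Enable extra statistics beyond the defaults in setDefaults().
    config.property_maps["psf_size"].do_max = True
    config.property_maps["exposure_time"].do_mean = True
    return config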

455 

456class HealSparsePropertyMapTask(pipeBase.PipelineTask): 

457 """Task to compute Healsparse property maps. 

458 

459 This task will compute individual property maps (per tract, per 

460 map type, per band). These maps cover the full coadd tract, and 

461 are not truncated to the inner tract region. 

462 """ 

463 ConfigClass = HealSparsePropertyMapConfig 

464 _DefaultName = "healSparsePropertyMapTask" 

465 

466 def __init__(self, **kwargs): 

467 super().__init__(**kwargs) 

468 self.property_maps = PropertyMapMap() 

469 for name, config, PropertyMapClass in self.config.property_maps.apply(): 

470 self.property_maps[name] = PropertyMapClass(config, name) 

471 

472 @pipeBase.timeMethod 

473 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

474 inputs = butlerQC.get(inputRefs) 

475 

476 sky_map = inputs.pop("sky_map") 

477 

478 tract = butlerQC.quantum.dataId["tract"] 

479 band = butlerQC.quantum.dataId["band"] 

480 

481 input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]} 

482 coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]} 

483 

484 visit_summary_dict = {ref.dataId["visit"]: ref.get() 

485 for ref in inputs["visit_summaries"]} 

486 

487 self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict) 

488 

489 # Write the outputs 

490 for name, property_map in self.property_maps.items(): 

491 if property_map.config.do_min: 

492 butlerQC.put(property_map.min_map, 

493 getattr(outputRefs, f"{name}_map_min")) 

494 if property_map.config.do_max: 

495 butlerQC.put(property_map.max_map, 

496 getattr(outputRefs, f"{name}_map_max")) 

497 if property_map.config.do_mean: 

498 butlerQC.put(property_map.mean_map, 

499 getattr(outputRefs, f"{name}_map_mean")) 

500 if property_map.config.do_weighted_mean: 

501 butlerQC.put(property_map.weighted_mean_map, 

502 getattr(outputRefs, f"{name}_map_weighted_mean")) 

503 if property_map.config.do_sum: 

504 butlerQC.put(property_map.sum_map, 

505 getattr(outputRefs, f"{name}_map_sum")) 

506 

507 def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict): 

508 """Run the healsparse property task. 

509 

510 Parameters 

511 ---------- 

512 sky_map : `lsst.skymap.BaseSkyMap` 
 Sky map object. 

513 tract : `int` 

514 Tract number. 

515 band : `str` 

516 Band name for logging. 

517 coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`] 

518 Dictionary of coadd exposure references. Keys are patch numbers. 

519 input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`] 

520 Dictionary of input map references. Keys are patch numbers. 

521 visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`] 

522 Dictionary of visit summary tables. Keys are visit numbers. 

523 

524 Raises 

525 ------ 

526 RepeatableQuantumError 

527 If visit_summary_dict is missing any visits or detectors found in an 

528 input map. This leads to an inconsistency between what is in the coadd 

529 (via the input map) and the visit summary tables which contain data 

530 to compute the maps. 

531 """ 

532 tract_info = sky_map[tract] 

533 

534 tract_maps_initialized = False 

535 

536 for patch in input_map_dict.keys(): 

537 self.log.info("Making maps for band %s, tract %d, patch %d.", 

538 band, tract, patch) 

539 

540 patch_info = tract_info[patch] 

541 

542 input_map = input_map_dict[patch].get() 

543 coadd_photo_calib = coadd_dict[patch].get(component="photoCalib") 

544 coadd_inputs = coadd_dict[patch].get(component="coaddInputs") 

545 

546 coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude()) 

547 

548 # Crop input_map to the inner polygon of the patch 

549 poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices() 

550 patch_radec = self._vertices_to_radec(poly_vertices) 

551 patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1], 

552 value=np.arange(input_map.wide_mask_maxbits)) 

553 patch_poly_map = patch_poly.get_map_like(input_map) 

554 input_map = hsp.and_intersection([input_map, patch_poly_map]) 

555 

556 if not tract_maps_initialized: 

557 # We use the first input map nside information to initialize 

558 # the tract maps 

559 nside_coverage = self._compute_nside_coverage_tract(tract_info) 

560 nside = input_map.nside_sparse 

561 

562 do_compute_approx_psf = False 

563 # Initialize the tract maps 

564 for property_map in self.property_maps: 

565 property_map.initialize_tract_maps(nside_coverage, nside) 

566 if property_map.requires_psf: 

567 do_compute_approx_psf = True 

568 

569 tract_maps_initialized = True 

570 

571 valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True) 

572 

573 # Check if there are no valid pixels for the inner (unique) patch region 

574 if valid_pixels.size == 0: 

575 continue 

576 

577 # Initialize the value accumulators 

578 for property_map in self.property_maps: 

579 property_map.initialize_values(valid_pixels.size) 

580 property_map.zeropoint = coadd_zeropoint 

581 

582 # Initialize the weight and counter accumulators 

583 total_weights = np.zeros(valid_pixels.size) 

584 total_inputs = np.zeros(valid_pixels.size, dtype=np.int32) 

585 

586 for bit, ccd_row in enumerate(coadd_inputs.ccds): 

587 # Which pixels in the map are used by this visit/detector 

588 inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit])) 

589 

590 # Check if there are any valid pixels in the map from this detector. 

591 if inmap.size == 0: 

592 continue 

593 

594 # visit, detector_id, weight = input_dict[bit] 

595 visit = ccd_row["visit"] 

596 detector_id = ccd_row["ccd"] 

597 weight = ccd_row["weight"] 

598 

599 x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True) 

600 scalings = self._compute_calib_scale(ccd_row, x, y) 

601 

602 if do_compute_approx_psf: 

603 psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap]) 

604 else: 

605 psf_array = None 

606 

607 total_weights[inmap] += weight 

608 total_inputs[inmap] += 1 

609 

610 # Retrieve the correct visitSummary row 

611 if visit not in visit_summary_dict: 

612 msg = f"Visit {visit} not found in visit_summaries." 

613 raise pipeBase.RepeatableQuantumError(msg) 

614 row = visit_summary_dict[visit].find(detector_id) 

615 if row is None: 

616 msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries." 

617 raise pipeBase.RepeatableQuantumError(msg) 

618 

619 # Accumulate the values 

620 for property_map in self.property_maps: 

621 property_map.accumulate_values(inmap, 

622 vpix_ra[inmap], 

623 vpix_dec[inmap], 

624 weight, 

625 scalings, 

626 row, 

627 psf_array=psf_array) 

628 

629 # Finalize the mean values and set the tract maps 

630 for property_map in self.property_maps: 

631 property_map.finalize_mean_values(total_weights, total_inputs) 

632 property_map.set_map_values(valid_pixels) 

633 

634 def _compute_calib_scale(self, ccd_row, x, y): 

635 """Compute calibration scaling values. 

636 

637 Parameters 

638 ---------- 

639 ccd_row : `lsst.afw.table.ExposureRecord` 

640 Exposure metadata for a given detector exposure. 

641 x : `np.ndarray` 

642 Array of x positions. 

643 y : `np.ndarray` 

644 Array of y positions. 

645 

646 Returns 

647 ------- 

648 calib_scale : `np.ndarray` 

649 Array of calibration scale values. 

650 """ 

651 photo_calib = ccd_row.getPhotoCalib() 

652 bf = photo_calib.computeScaledCalibration() 

653 if bf.getBBox() == ccd_row.getBBox(): 

654 # Track variable calibration over the detector 

655 calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y) 

656 else: 

657 # Spatially constant calibration 

658 calib_scale = photo_calib.getCalibrationMean() 

659 

660 return calib_scale 

661 

662 def _vertices_to_radec(self, vertices): 

663 """Convert polygon vertices to ra/dec. 

664 

665 Parameters 

666 ---------- 

667 vertices : `list` [ `lsst.sphgeom.UnitVector3d` ] 

668 Vertices for bounding polygon. 

669 

670 Returns 

671 ------- 

672 radec : `numpy.ndarray` 

673 Nx2 array of ra/dec positions (in degrees) associated with vertices. 

674 """ 

675 lonlats = [lsst.sphgeom.LonLat(x) for x in vertices] 

676 radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for 

677 x in lonlats]) 

678 return radec 

679 

680 def _compute_nside_coverage_tract(self, tract_info): 

681 """Compute the optimal coverage nside for a tract. 

682 

683 Parameters 

684 ---------- 

685 tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo` 

686 Tract information object. 

687 

688 Returns 

689 ------- 

690 nside_coverage : `int` 

691 Optimal coverage nside for a tract map. 

692 """ 

693 num_patches = tract_info.getNumPatches() 

694 

695 # Compute approximate patch area 

696 patch_info = tract_info.getPatchInfo(0) 

697 vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices() 

698 radec = self._vertices_to_radec(vertices) 

699 delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0]) 

700 delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1]) 

701 patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1]))) 

702 

703 tract_area = num_patches[0]*num_patches[1]*patch_area 

704 # Start with a fairly low nside and increase until we find the approximate area. 

705 nside_coverage_tract = 32 

706 while hp.nside2pixarea(nside_coverage_tract, degrees=True) > tract_area: 

707 nside_coverage_tract = 2*nside_coverage_tract 

708 # Step back one, but keep nside_coverage within the 32 to 128 range 

709 # recommended by healsparse. 

710 nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, 128)) 

711 

712 return nside_coverage_tract 

713 
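# Illustrative sketch (standalone, hypothetical tract area): the doubling loop
# above. nside is doubled until the coverage pixel area drops below the tract
# area, then stepped back one and clipped to healsparse's recommended
# 32..128 range. Uses the hp and np modules imported at the top of this file.
def _example_nside_coverage(tract_area_deg2=3.0):
    nside = 32
    while hp.nside2pixarea(nside, degrees=True) > tract_area_deg2:
        nside *= 2
    # For ~3 deg^2 this stops at nside=64 and returns 32 after the step back.
    return int(np.clip(nside/2, 32, 128))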

714 

715class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections, 

716 dimensions=("band", "skymap",), 

717 defaultTemplates={"coaddName": "deep"}): 

718 sky_map = pipeBase.connectionTypes.Input( 

719 doc="Input definition of geometry/bbox and projection/wcs for coadded exposures", 

720 name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME, 

721 storageClass="SkyMap", 

722 dimensions=("skymap",), 

723 ) 

724 

725 # Create output connections for all possible maps defined in the 

726 # registry. The vars() trick used here allows us to set class attributes 

727 # programmatically. Taken from 

728 # https://stackoverflow.com/questions/2519807/ 

729 # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class 

730 for name in BasePropertyMap.registry: 

731 vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input( 

732 doc=f"Minimum-value map of {name}", 

733 name=f"{{coaddName}}Coadd_{name}_map_min", 

734 storageClass="HealSparseMap", 

735 dimensions=("tract", "skymap", "band"), 

736 multiple=True, 

737 deferLoad=True, 

738 ) 

739 vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output( 

740 doc=f"Minumum-value map of {name}", 

741 name=f"{{coaddName}}Coadd_{name}_consolidated_map_min", 

742 storageClass="HealSparseMap", 

743 dimensions=("skymap", "band"), 

744 ) 

745 vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input( 

746 doc=f"Maximum-value map of {name}", 

747 name=f"{{coaddName}}Coadd_{name}_map_max", 

748 storageClass="HealSparseMap", 

749 dimensions=("tract", "skymap", "band"), 

750 multiple=True, 

751 deferLoad=True, 

752 ) 

753 vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output( 

754 doc=f"Minumum-value map of {name}", 

755 name=f"{{coaddName}}Coadd_{name}_consolidated_map_max", 

756 storageClass="HealSparseMap", 

757 dimensions=("skymap", "band"), 

758 ) 

759 vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input( 

760 doc=f"Mean-value map of {name}", 

761 name=f"{{coaddName}}Coadd_{name}_map_mean", 

762 storageClass="HealSparseMap", 

763 dimensions=("tract", "skymap", "band"), 

764 multiple=True, 

765 deferLoad=True, 

766 ) 

767 vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output( 

768 doc=f"Minumum-value map of {name}", 

769 name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean", 

770 storageClass="HealSparseMap", 

771 dimensions=("skymap", "band"), 

772 ) 

773 vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input( 

774 doc=f"Weighted mean-value map of {name}", 

775 name=f"{{coaddName}}Coadd_{name}_map_weighted_mean", 

776 storageClass="HealSparseMap", 

777 dimensions=("tract", "skymap", "band"), 

778 multiple=True, 

779 deferLoad=True, 

780 ) 

781 vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output( 

782 doc=f"Minumum-value map of {name}", 

783 name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean", 

784 storageClass="HealSparseMap", 

785 dimensions=("skymap", "band"), 

786 ) 

787 vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input( 

788 doc=f"Sum-value map of {name}", 

789 name=f"{{coaddName}}Coadd_{name}_map_sum", 

790 storageClass="HealSparseMap", 

791 dimensions=("tract", "skymap", "band"), 

792 multiple=True, 

793 deferLoad=True, 

794 ) 

795 vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output( 

796 doc=f"Minumum-value map of {name}", 

797 name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum", 

798 storageClass="HealSparseMap", 

799 dimensions=("skymap", "band"), 

800 ) 

801 

802 def __init__(self, *, config=None): 

803 super().__init__(config=config) 

804 

805 # Not all possible maps in the registry will be configured to run. 

806 # Here we remove the unused connections. 

807 for name in BasePropertyMap.registry: 

808 if name not in config.property_maps: 

809 prop_config = BasePropertyMapConfig() 

810 prop_config.do_min = False 

811 prop_config.do_max = False 

812 prop_config.do_mean = False 

813 prop_config.do_weighted_mean = False 

814 prop_config.do_sum = False 

815 else: 

816 prop_config = config.property_maps[name] 

817 

818 if not prop_config.do_min: 

819 self.inputs.remove(f"{name}_map_min") 

820 self.outputs.remove(f"{name}_consolidated_map_min") 

821 if not prop_config.do_max: 

822 self.inputs.remove(f"{name}_map_max") 

823 self.outputs.remove(f"{name}_consolidated_map_max") 

824 if not prop_config.do_mean: 

825 self.inputs.remove(f"{name}_map_mean") 

826 self.outputs.remove(f"{name}_consolidated_map_mean") 

827 if not prop_config.do_weighted_mean: 

828 self.inputs.remove(f"{name}_map_weighted_mean") 

829 self.outputs.remove(f"{name}_consolidated_map_weighted_mean") 

830 if not prop_config.do_sum: 

831 self.inputs.remove(f"{name}_map_sum") 

832 self.outputs.remove(f"{name}_consolidated_map_sum") 

833 

834 

835class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig, 

836 pipelineConnections=ConsolidateHealSparsePropertyMapConnections): 

837 """Configuration parameters for ConsolidateHealSparsePropertyMapTask""" 

838 property_maps = BasePropertyMap.registry.makeField( 

839 multi=True, 

840 default=["exposure_time", 

841 "psf_size", 

842 "psf_e1", 

843 "psf_e2", 

844 "psf_maglim", 

845 "sky_noise", 

846 "sky_background", 

847 "dcr_dra", 

848 "dcr_ddec", 

849 "dcr_e1", 

850 "dcr_e2"], 

851 doc="Property map computation objects", 

852 ) 

853 nside_coverage = pexConfig.Field( 

854 doc="Consolidated HealSparse coverage map nside. Must be power of 2.", 

855 dtype=int, 

856 default=32, 

857 check=_is_power_of_two, 

858 ) 

859 

860 def setDefaults(self): 

861 self.property_maps["exposure_time"].do_sum = True 

862 self.property_maps["psf_size"].do_weighted_mean = True 

863 self.property_maps["psf_e1"].do_weighted_mean = True 

864 self.property_maps["psf_e2"].do_weighted_mean = True 

865 self.property_maps["psf_maglim"].do_weighted_mean = True 

866 self.property_maps["sky_noise"].do_weighted_mean = True 

867 self.property_maps["sky_background"].do_weighted_mean = True 

868 self.property_maps["dcr_dra"].do_weighted_mean = True 

869 self.property_maps["dcr_ddec"].do_weighted_mean = True 

870 self.property_maps["dcr_e1"].do_weighted_mean = True 

871 self.property_maps["dcr_e2"].do_weighted_mean = True 

872 

873 

874class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask): 

875 """Task to consolidate HealSparse property maps. 

876 

877 This task will take all the individual tract-based maps (per map type, 

878 per band) and consolidate them into one survey-wide map (per map type, 

879 per band). Each tract map is truncated to its inner region before 

880 consolidation. 

881 """ 

882 ConfigClass = ConsolidateHealSparsePropertyMapConfig 

883 _DefaultName = "consolidateHealSparsePropertyMapTask" 

884 

885 def __init__(self, **kwargs): 

886 super().__init__(**kwargs) 

887 self.property_maps = PropertyMapMap() 

888 for name, config, PropertyMapClass in self.config.property_maps.apply(): 

889 self.property_maps[name] = PropertyMapClass(config, name) 

890 

891 @pipeBase.timeMethod 

892 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

893 inputs = butlerQC.get(inputRefs) 

894 

895 sky_map = inputs.pop("sky_map") 

896 

897 # These need to be consolidated one at a time to conserve memory. 

898 for name in self.config.property_maps.names: 

899 for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']: 

900 map_type = f"{name}_map_{type_}" 

901 if map_type in inputs: 

902 input_refs = {ref.dataId['tract']: ref 

903 for ref in inputs[map_type]} 

904 consolidated_map = self.consolidate_map(sky_map, input_refs) 

905 butlerQC.put(consolidated_map, 

906 getattr(outputRefs, f"{name}_consolidated_map_{type_}")) 

907 

908 def consolidate_map(self, sky_map, input_refs): 

909 """Consolidate the healsparse property maps. 

910 

911 Parameters 

912 ---------- 

913 sky_map : `lsst.skymap.BaseSkyMap` 
 Sky map object. 

914 input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`] 

915 Dictionary mapping tract_id to deferred dataset references. 

916 

917 Returns 

918 ------- 

919 consolidated_map : `healsparse.HealSparseMap` 

920 Consolidated HealSparse map. 

921 """ 

922 # First, we read in the coverage maps to know how much memory 

923 # to allocate 

924 cov_mask = None 

925 nside_coverage_inputs = None 

926 for tract_id in input_refs: 

927 cov = input_refs[tract_id].get(component='coverage') 

928 if cov_mask is None: 

929 cov_mask = cov.coverage_mask 

930 nside_coverage_inputs = cov.nside_coverage 

931 else: 

932 cov_mask |= cov.coverage_mask 

933 

934 cov_pix_inputs, = np.where(cov_mask) 

935 

936 # Compute the coverage pixels for the desired nside_coverage 

937 if nside_coverage_inputs == self.config.nside_coverage: 

938 cov_pix = cov_pix_inputs 

939 elif nside_coverage_inputs > self.config.nside_coverage: 

940 # Converting from higher resolution coverage to lower 

941 # resolution coverage. 

942 bit_shift = hsp.utils._compute_bitshift(self.config.nside_coverage, 

943 nside_coverage_inputs) 

944 cov_pix = np.right_shift(cov_pix_inputs, bit_shift) 

945 else: 

946 # Converting from lower resolution coverage to higher 

947 # resolution coverage. 

948 bit_shift = hsp.utils._compute_bitshift(nside_coverage_inputs, 

949 self.config.nside_coverage) 

950 cov_pix = np.left_shift(cov_pix_inputs, bit_shift) 

951 

952 # Now read in each tract map and build the consolidated map. 

953 consolidated_map = None 

954 for tract_id in input_refs: 

955 input_map = input_refs[tract_id].get() 

956 if consolidated_map is None: 

957 consolidated_map = hsp.HealSparseMap.make_empty( 

958 self.config.nside_coverage, 

959 input_map.nside_sparse, 

960 input_map.dtype, 

961 sentinel=input_map._sentinel, 

962 cov_pixels=cov_pix) 

963 

964 # Only use pixels that are properly inside the tract. 

965 vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True) 

966 vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True) 

967 

968 in_tract = (vpix_tract_ids == tract_id) 

969 

970 consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]] 

971 

972 return consolidated_map
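# Illustrative sketch (standalone, not part of the task): the coverage-pixel
# resolution conversion used above. In NESTED ordering, a pixel index at
# nside_high maps to its parent at nside_low via a right shift of
# 2*log2(nside_high/nside_low) bits, and a left shift gives the first child
# when refining. hsp.utils._compute_bitshift is a private helper, so the
# shift is computed directly here.
def _example_coverage_pixel_conversion():
    nside_low, nside_high = 32, 128
    bit_shift = 2*int(np.round(np.log2(nside_high/nside_low)))  # = 4
    high_res_pixels = np.array([100, 101, 160])
    parents = np.right_shift(high_res_pixels, bit_shift)   # [6, 6, 10]
    first_children = np.left_shift(parents, bit_shift)      # [96, 96, 160]
    return parents, first_children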