Coverage for python/lsst/pipe/tasks/healSparseMapping.py: 16%

393 statements · coverage.py v7.2.5, created at 2023-05-03 03:39 -0700

# This file is part of pipe_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig",
           "HealSparseMapFormatter", "HealSparsePropertyMapConnections",
           "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask",
           "ConsolidateHealSparsePropertyMapConnections",
           "ConsolidateHealSparsePropertyMapConfig",
           "ConsolidateHealSparsePropertyMapTask"]

from collections import defaultdict
import warnings
import numbers
import numpy as np
import hpgeom as hpg
import healsparse as hsp

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.geom
import lsst.sphgeom
import lsst.afw.geom as afwGeom
from lsst.daf.butler import Formatter
from lsst.skymap import BaseSkyMap
from lsst.utils.timer import timeMethod
from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig,
                                          PropertyMapMap, compute_approx_psf_size_and_shape)


class HealSparseMapFormatter(Formatter):
    """Interface for reading and writing healsparse.HealSparseMap files."""
    unsupportedParameters = frozenset()
    supportedExtensions = frozenset({".hsp", ".fit", ".fits"})
    extension = '.hsp'

    def read(self, component=None):
        # Docstring inherited from Formatter.read.
        path = self.fileDescriptor.location.path

        if component == 'coverage':
            try:
                data = hsp.HealSparseCoverage.read(path)
            except (OSError, RuntimeError):
                raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

            return data

        if self.fileDescriptor.parameters is None:
            pixels = None
            degrade_nside = None
        else:
            pixels = self.fileDescriptor.parameters.get('pixels', None)
            degrade_nside = self.fileDescriptor.parameters.get('degrade_nside', None)
        try:
            data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
        except (OSError, RuntimeError):
            raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

        return data

    def write(self, inMemoryDataset):
        # Docstring inherited from Formatter.write.
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True)
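
# Editorial note, a sketch of assumed typical usage (not part of this module):
# the `pixels` and `degrade_nside` read parameters handled above are usually
# supplied when reading through the butler, e.g.
#     butler.get("deepCoadd_inputMap", dataId=dataId, parameters={"degrade_nside": 4096})
# and are forwarded by this formatter to hsp.HealSparseMap.read(); requesting
# the "coverage" component returns only the hsp.HealSparseCoverage object
# rather than the full map.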


def _is_power_of_two(value):
    """Check that value is a power of two.

    Parameters
    ----------
    value : `int`
        Value to check.

    Returns
    -------
    is_power_of_two : `bool`
        True if value is a power of two; False otherwise, or
        if value is not an integer.
    """
    if not isinstance(value, numbers.Integral):
        return False

    # See https://stackoverflow.com/questions/57025836
    # Every power of 2 has exactly 1 bit set to 1; subtracting
    # 1 therefore flips every preceding bit.  If you AND that
    # with the original value, the result must be 0.
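    # For example, 64 is 0b1000000 and 63 is 0b0111111, so 64 & 63 == 0,
    # while 48 & 47 == 0b100000 != 0.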

    return (value & (value - 1) == 0) and value != 0


class HealSparseInputMapConfig(pexConfig.Config):
    """Configuration parameters for HealSparseInputMapTask"""
    nside = pexConfig.Field(
        doc="Mapping healpix nside.  Must be power of 2.",
        dtype=int,
        default=32768,
        check=_is_power_of_two,
    )
    nside_coverage = pexConfig.Field(
        doc="HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        default=256,
        check=_is_power_of_two,
    )

    bad_mask_min_coverage = pexConfig.Field(
        doc=("Minimum area fraction of a map healpixel that must be "
             "covered by bad pixels to be removed from the input map. "
             "This is approximate."),
        dtype=float,
        default=0.5,
    )


class HealSparseInputMapTask(pipeBase.Task):
    """Task for making a HealSparse input map."""

    ConfigClass = HealSparseInputMapConfig
    _DefaultName = "healSparseInputMap"

    def __init__(self, **kwargs):
        pipeBase.Task.__init__(self, **kwargs)

        self.ccd_input_map = None

    def build_ccd_input_map(self, bbox, wcs, ccds):
        """Build a map from ccd valid polygons or bounding boxes.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box for region to build input map.
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object for region to build input map.
        ccds : `lsst.afw.table.ExposureCatalog`
            Exposure catalog with ccd data from coadd inputs.
        """
        with warnings.catch_warnings():
            # Healsparse will emit a warning if nside coverage is greater than
            # 128.  In the case of generating patch input maps, and not global
            # maps, high nside coverage works fine, so we can suppress this
            # warning.
            warnings.simplefilter("ignore")
            self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
                                                              nside_sparse=self.config.nside,
                                                              dtype=hsp.WIDE_MASK,
                                                              wide_mask_maxbits=len(ccds))
        self._wcs = wcs
        self._bbox = bbox
        self._ccds = ccds

        pixel_scale = wcs.getPixelScale().asArcseconds()
        hpix_area_arcsec2 = hpg.nside_to_pixel_area(self.config.nside, degrees=True)*(3600.**2.)
        self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)
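        # A note on the threshold above: hpix_area_arcsec2/pixel_scale**2 is
        # approximately the number of image pixels that fit in one healpix
        # map pixel, so _min_bad is the approximate count of bad image pixels
        # needed to flag that fraction of a map pixel as bad.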

        metadata = {}
        self._bits_per_visit_ccd = {}
        self._bits_per_visit = defaultdict(list)
        for bit, ccd_row in enumerate(ccds):
            metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
            metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
            metadata[f"B{bit:04d}WT"] = ccd_row["weight"]

            self._bits_per_visit_ccd[(ccd_row["visit"], ccd_row["ccd"])] = bit
            self._bits_per_visit[ccd_row["visit"]].append(bit)

            ccd_poly = ccd_row.getValidPolygon()
            if ccd_poly is None:
                ccd_poly = afwGeom.Polygon(lsst.geom.Box2D(ccd_row.getBBox()))
            # Detectors need to be rendered with their own wcs.
            ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())

            # Create a ccd healsparse polygon
            poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
                               dec=ccd_poly_radec[: -1, 1],
                               value=[bit])
            self.ccd_input_map.set_bits_pix(poly.get_pixels(nside=self.ccd_input_map.nside_sparse),
                                            [bit])

        # Cut down to the overall bounding box with associated wcs.
        bbox_afw_poly = afwGeom.Polygon(lsst.geom.Box2D(bbox))
        bbox_poly_radec = self._pixels_to_radec(self._wcs,
                                                bbox_afw_poly.convexHull().getVertices())
        bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
                                value=np.arange(self.ccd_input_map.wide_mask_maxbits))
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)
            self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map])
        self.ccd_input_map.metadata = metadata

        # Create a temporary map to hold the count of bad pixels in each healpix pixel
        self._ccd_input_pixels = self.ccd_input_map.valid_pixels

        dtype = [(f"v{visit}", np.int64) for visit in self._bits_per_visit.keys()]

        with warnings.catch_warnings():
            # Healsparse will emit a warning if nside coverage is greater than
            # 128.  In the case of generating patch input maps, and not global
            # maps, high nside coverage works fine, so we can suppress this
            # warning.
            warnings.simplefilter("ignore")
            self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty(
                nside_coverage=self.config.nside_coverage,
                nside_sparse=self.config.nside,
                dtype=dtype,
                primary=dtype[0][0])

        # Don't set input bad map if there are no ccds which overlap the bbox.
        if len(self._ccd_input_pixels) > 0:
            self._ccd_input_bad_count_map[self._ccd_input_pixels] = np.zeros(1, dtype=dtype)

    def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value):
        """Mask a subregion from a visit.
        This must be run after build_ccd_input_map initializes
        the overall map.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box from region to mask.
        visit : `int`
            Visit number corresponding to warp with mask.
        mask : `lsst.afw.image.MaskX`
            Mask plane from warp exposure.
        bit_mask_value : `int`
            Bit mask to check for bad pixels.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox")

        # Find the bad pixels and convert to healpix
        bad_pixels = np.where(mask.array & bit_mask_value)
        if len(bad_pixels[0]) == 0:
            # No bad pixels
            return

        # Bad pixels come from warps which use the overall wcs.
        bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
                                                    bad_pixels[0].astype(np.float64),
                                                    degrees=True)
        bad_hpix = hpg.angle_to_pixel(self.config.nside, bad_ra, bad_dec)

        # Count the number of bad image pixels in each healpix pixel
        min_bad_hpix = bad_hpix.min()
        bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
        np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)
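        # np.add.at performs an unbuffered in-place add, so repeated healpix
        # indices in bad_hpix accumulate correctly (a plain fancy-indexed +=
        # would count each pixel at most once).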

        # Add these to the accumulator map.
        # We need to make sure that the "primary" array has valid values for
        # this pixel to be registered in the accumulator map.
        pix_to_add, = np.where(bad_hpix_count > 0)
        count_map_arr = self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add]
        primary = self._ccd_input_bad_count_map.primary
        count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)

        count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
        count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]

        self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add] = count_map_arr

    def finalize_ccd_input_map_mask(self):
        """Use accumulated mask information to finalize the masking of
        ccd_input_map.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.")

        count_map_arr = self._ccd_input_bad_count_map[self._ccd_input_pixels]
        for visit in self._bits_per_visit:
            to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad)
            if to_mask.size == 0:
                continue
            self.ccd_input_map.clear_bits_pix(self._ccd_input_pixels[to_mask],
                                              self._bits_per_visit[visit])

        # Clear memory
        self._ccd_input_bad_count_map = None

    def _pixels_to_radec(self, wcs, pixels):
        """Convert pixels to ra/dec positions using a wcs.

        Parameters
        ----------
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object.
        pixels : `list` [`lsst.geom.Point2D`]
            List of pixels to convert.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions associated with pixels.
        """
        sph_pts = wcs.pixelToSky(pixels)
        return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
                         for sph in sph_pts])
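
# Editorial note, a rough usage sketch assembled from the method docstrings
# above; the names `coadd_bbox`, `coadd_wcs`, `ccd_catalog`, `warps`,
# `warp_bbox`, `warp_mask`, and `bad_bit_mask` are placeholders, not part of
# this module:
#
#     task = HealSparseInputMapTask()
#     task.build_ccd_input_map(coadd_bbox, coadd_wcs, ccd_catalog)
#     for visit, warp_bbox, warp_mask in warps:
#         task.mask_warp_bbox(warp_bbox, visit, warp_mask, bad_bit_mask)
#     task.finalize_ccd_input_map_mask()
#     input_map = task.ccd_input_map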


class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                       dimensions=("tract", "band", "skymap",),
                                       defaultTemplates={"coaddName": "deep",
                                                         "calexpType": ""}):
    input_maps = pipeBase.connectionTypes.Input(
        doc="Healsparse bit-wise coadd input maps",
        name="{coaddName}Coadd_inputMap",
        storageClass="HealSparseMap",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    coadd_exposures = pipeBase.connectionTypes.Input(
        doc="Coadded exposures associated with input_maps",
        name="{coaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    visit_summaries = pipeBase.connectionTypes.Input(
        doc="Visit summary tables with aggregated statistics",
        name="finalVisitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create output connections for all possible maps defined in the
    # registry.  The vars() trick used here allows us to set class attributes
    # programmatically.  Taken from
    # https://stackoverflow.com/questions/2519807/
    # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
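    # For a property named "psf_size", the loop above creates connection
    # attributes psf_size_map_min through psf_size_map_sum, wired to dataset
    # types like "{coaddName}Coadd_psf_size_map_min"; connections for
    # statistics that are not enabled in the config are removed in __init__
    # below.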

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Here we remove the unused connections.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.outputs.remove(f"{name}_map_min")
            if not prop_config.do_max:
                self.outputs.remove(f"{name}_map_max")
            if not prop_config.do_mean:
                self.outputs.remove(f"{name}_map_mean")
            if not prop_config.do_weighted_mean:
                self.outputs.remove(f"{name}_map_weighted_mean")
            if not prop_config.do_sum:
                self.outputs.remove(f"{name}_map_sum")


class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                  pipelineConnections=HealSparsePropertyMapConnections):
    """Configuration parameters for HealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2",
                 "epoch"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
        self.property_maps["epoch"].do_mean = True
        self.property_maps["epoch"].do_min = True
        self.property_maps["epoch"].do_max = True


class HealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to compute Healsparse property maps.

    This task will compute individual property maps (per tract, per
    map type, per band).  These maps cover the full coadd tract, and
    are not truncated to the inner tract region.
    """
    ConfigClass = HealSparsePropertyMapConfig
    _DefaultName = "healSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        tract = butlerQC.quantum.dataId["tract"]
        band = butlerQC.quantum.dataId["band"]

        input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
        coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}

        visit_summary_dict = {ref.dataId["visit"]: ref.get()
                              for ref in inputs["visit_summaries"]}

        self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)

        # Write the outputs
        for name, property_map in self.property_maps.items():
            if property_map.config.do_min:
                butlerQC.put(property_map.min_map,
                             getattr(outputRefs, f"{name}_map_min"))
            if property_map.config.do_max:
                butlerQC.put(property_map.max_map,
                             getattr(outputRefs, f"{name}_map_max"))
            if property_map.config.do_mean:
                butlerQC.put(property_map.mean_map,
                             getattr(outputRefs, f"{name}_map_mean"))
            if property_map.config.do_weighted_mean:
                butlerQC.put(property_map.weighted_mean_map,
                             getattr(outputRefs, f"{name}_map_weighted_mean"))
            if property_map.config.do_sum:
                butlerQC.put(property_map.sum_map,
                             getattr(outputRefs, f"{name}_map_sum"))

    def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
        """Run the healsparse property task.

        Parameters
        ----------
        sky_map : Sky map object
        tract : `int`
            Tract number.
        band : `str`
            Band name for logging.
        coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of coadd exposure references.  Keys are patch numbers.
        input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of input map references.  Keys are patch numbers.
        visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
            Dictionary of visit summary tables.  Keys are visit numbers.

        Raises
        ------
        RepeatableQuantumError
            If visit_summary_dict is missing any visits or detectors found in an
            input map.  This leads to an inconsistency between what is in the coadd
            (via the input map) and the visit summary tables which contain data
            to compute the maps.
        """
        tract_info = sky_map[tract]

        tract_maps_initialized = False

        for patch in input_map_dict.keys():
            self.log.info("Making maps for band %s, tract %d, patch %d.",
                          band, tract, patch)

            patch_info = tract_info[patch]

            input_map = input_map_dict[patch].get()
            if input_map.valid_pixels.size == 0:
                self.log.warning("No valid pixels for band %s, tract %d, patch %d; skipping.",
                                 band, tract, patch)
                continue
            coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
            coadd_inputs = coadd_dict[patch].get(component="coaddInputs")

            coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())
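            # getInstFluxAtZeroMagnitude() is the instrumental flux of a
            # zero-magnitude source, so this is the coadd photometric
            # zeropoint in magnitudes.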

            # Crop input_map to the inner polygon of the patch
            poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
            patch_radec = self._vertices_to_radec(poly_vertices)
            patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
                                     value=np.arange(input_map.wide_mask_maxbits))
            with warnings.catch_warnings():
                # Healsparse will emit a warning if nside coverage is greater than
                # 128.  In the case of generating patch input maps, and not global
                # maps, high nside coverage works fine, so we can suppress this
                # warning.
                warnings.simplefilter("ignore")
                patch_poly_map = patch_poly.get_map_like(input_map)
                input_map = hsp.and_intersection([input_map, patch_poly_map])

            if not tract_maps_initialized:
                # We use the first input map nside information to initialize
                # the tract maps
                nside_coverage = self._compute_nside_coverage_tract(tract_info)
                nside = input_map.nside_sparse

                do_compute_approx_psf = False
                # Initialize the tract maps
                for property_map in self.property_maps:
                    property_map.initialize_tract_maps(nside_coverage, nside)
                    if property_map.requires_psf:
                        do_compute_approx_psf = True

                tract_maps_initialized = True

            valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)

            # Check if there are no valid pixels for the inner (unique) patch region
            if valid_pixels.size == 0:
                continue

            # Initialize the value accumulators
            for property_map in self.property_maps:
                property_map.initialize_values(valid_pixels.size)
                property_map.zeropoint = coadd_zeropoint

            # Initialize the weight and counter accumulators
            total_weights = np.zeros(valid_pixels.size)
            total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)

            for bit, ccd_row in enumerate(coadd_inputs.ccds):
                # Which pixels in the map are used by this visit/detector
                inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))

                # Check if there are any valid pixels in the map from this detector.
                if inmap.size == 0:
                    continue

                # visit, detector_id, weight = input_dict[bit]
                visit = ccd_row["visit"]
                detector_id = ccd_row["ccd"]
                weight = ccd_row["weight"]

                x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
                scalings = self._compute_calib_scale(ccd_row, x, y)

                if do_compute_approx_psf:
                    psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap])
                else:
                    psf_array = None

                total_weights[inmap] += weight
                total_inputs[inmap] += 1

                # Retrieve the correct visitSummary row
                if visit not in visit_summary_dict:
                    msg = f"Visit {visit} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)
                row = visit_summary_dict[visit].find(detector_id)
                if row is None:
                    msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)

                # Accumulate the values
                for property_map in self.property_maps:
                    property_map.accumulate_values(inmap,
                                                   vpix_ra[inmap],
                                                   vpix_dec[inmap],
                                                   weight,
                                                   scalings,
                                                   row,
                                                   psf_array=psf_array)

            # Finalize the mean values and set the tract maps
            for property_map in self.property_maps:
                property_map.finalize_mean_values(total_weights, total_inputs)
                property_map.set_map_values(valid_pixels)

    def _compute_calib_scale(self, ccd_row, x, y):
        """Compute calibration scaling values.

        Parameters
        ----------
        ccd_row : `lsst.afw.table.ExposureRecord`
            Exposure metadata for a given detector exposure.
        x : `np.ndarray`
            Array of x positions.
        y : `np.ndarray`
            Array of y positions.

        Returns
        -------
        calib_scale : `np.ndarray`
            Array of calibration scale values.
        """
        photo_calib = ccd_row.getPhotoCalib()
        bf = photo_calib.computeScaledCalibration()
        if bf.getBBox() == ccd_row.getBBox():
            # Track variable calibration over the detector
            calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
        else:
            # Spatially constant calibration
            calib_scale = photo_calib.getCalibrationMean()

        return calib_scale

    def _vertices_to_radec(self, vertices):
        """Convert polygon vertices to ra/dec.

        Parameters
        ----------
        vertices : `list` [ `lsst.sphgeom.UnitVector3d` ]
            Vertices for bounding polygon.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions (in degrees) associated with vertices.
        """
        lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
        radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
                          x in lonlats])
        return radec

    def _compute_nside_coverage_tract(self, tract_info):
        """Compute the optimal coverage nside for a tract.

        Parameters
        ----------
        tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo`
            Tract information object.

        Returns
        -------
        nside_coverage : `int`
            Optimal coverage nside for a tract map.
        """
        num_patches = tract_info.getNumPatches()

        # Compute approximate patch area
        patch_info = tract_info.getPatchInfo(0)
        vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
        radec = self._vertices_to_radec(vertices)
        delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
        delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
        patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))
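        # delta_ra*delta_dec is the area of the patch's RA/Dec bounding box;
        # the cos(mean Dec) factor approximately corrects for the convergence
        # of meridians, giving an approximate area in square degrees.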

        tract_area = num_patches[0]*num_patches[1]*patch_area
        # Start with a fairly low nside and increase until we find the approximate area.
        nside_coverage_tract = 32
        while hpg.nside_to_pixel_area(nside_coverage_tract, degrees=True) > tract_area:
            nside_coverage_tract = 2*nside_coverage_tract
        # Step back one, but do not use coverage pixels coarser than nside=32
        # or finer than nside=128 (recommended by healsparse).
        nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, 128))

        return nside_coverage_tract


class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                                  dimensions=("band", "skymap",),
                                                  defaultTemplates={"coaddName": "deep"}):
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    # Create output connections for all possible maps defined in the
    # registry.  The vars() trick used here allows us to set class attributes
    # programmatically.  Taken from
    # https://stackoverflow.com/questions/2519807/
    # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_min",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_max",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Here we remove the unused connections.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.inputs.remove(f"{name}_map_min")
                self.outputs.remove(f"{name}_consolidated_map_min")
            if not prop_config.do_max:
                self.inputs.remove(f"{name}_map_max")
                self.outputs.remove(f"{name}_consolidated_map_max")
            if not prop_config.do_mean:
                self.inputs.remove(f"{name}_map_mean")
                self.outputs.remove(f"{name}_consolidated_map_mean")
            if not prop_config.do_weighted_mean:
                self.inputs.remove(f"{name}_map_weighted_mean")
                self.outputs.remove(f"{name}_consolidated_map_weighted_mean")
            if not prop_config.do_sum:
                self.inputs.remove(f"{name}_map_sum")
                self.outputs.remove(f"{name}_consolidated_map_sum")


class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                             pipelineConnections=ConsolidateHealSparsePropertyMapConnections):
    """Configuration parameters for ConsolidateHealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2",
                 "epoch"],
        doc="Property map computation objects",
    )
    nside_coverage = pexConfig.Field(
        doc="Consolidated HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        default=32,
        check=_is_power_of_two,
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
        self.property_maps["epoch"].do_mean = True
        self.property_maps["epoch"].do_min = True
        self.property_maps["epoch"].do_max = True


class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to consolidate HealSparse property maps.

    This task will take all the individual tract-based maps (per map type,
    per band) and consolidate them into one survey-wide map (per map type,
    per band).  Each tract map is truncated to its inner region before
    consolidation.
    """
    ConfigClass = ConsolidateHealSparsePropertyMapConfig
    _DefaultName = "consolidateHealSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        # These need to be consolidated one at a time to conserve memory.
        for name in self.config.property_maps.names:
            for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']:
                map_type = f"{name}_map_{type_}"
                if map_type in inputs:
                    input_refs = {ref.dataId['tract']: ref
                                  for ref in inputs[map_type]}
                    consolidated_map = self.consolidate_map(sky_map, input_refs)
                    butlerQC.put(consolidated_map,
                                 getattr(outputRefs, f"{name}_consolidated_map_{type_}"))

    def consolidate_map(self, sky_map, input_refs):
        """Consolidate the healsparse property maps.

        Parameters
        ----------
        sky_map : Sky map object
        input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary mapping tract_id to dataref.

        Returns
        -------
        consolidated_map : `healsparse.HealSparseMap`
            Consolidated HealSparse map.
        """
        # First, we read in the coverage maps to know how much memory
        # to allocate
        cov_mask = None
        nside_coverage_inputs = None
        for tract_id in input_refs:
            cov = input_refs[tract_id].get(component='coverage')
            if cov_mask is None:
                cov_mask = cov.coverage_mask
                nside_coverage_inputs = cov.nside_coverage
            else:
                cov_mask |= cov.coverage_mask

        cov_pix_inputs, = np.where(cov_mask)

        # Compute the coverage pixels for the desired nside_coverage
        if nside_coverage_inputs == self.config.nside_coverage:
            cov_pix = cov_pix_inputs
        elif nside_coverage_inputs > self.config.nside_coverage:
            # Converting from higher resolution coverage to lower
            # resolution coverage.
            bit_shift = hsp.utils._compute_bitshift(self.config.nside_coverage,
                                                    nside_coverage_inputs)
            cov_pix = np.right_shift(cov_pix_inputs, bit_shift)
        else:
            # Converting from lower resolution coverage to higher
            # resolution coverage.
            bit_shift = hsp.utils._compute_bitshift(nside_coverage_inputs,
                                                    self.config.nside_coverage)
            cov_pix = np.left_shift(cov_pix_inputs, bit_shift)
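        # Coverage pixels are in HEALPix NEST ordering, so changing the
        # coverage resolution amounts to a bit shift: right-shifting gives the
        # parent pixel at the coarser nside, left-shifting gives the first
        # child pixel at the finer nside.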

        # Now read in each tract map and build the consolidated map.
        consolidated_map = None
        for tract_id in input_refs:
            input_map = input_refs[tract_id].get()
            if consolidated_map is None:
                consolidated_map = hsp.HealSparseMap.make_empty(
                    self.config.nside_coverage,
                    input_map.nside_sparse,
                    input_map.dtype,
                    sentinel=input_map._sentinel,
                    cov_pixels=cov_pix)

            # Only use pixels that are properly inside the tract.
            vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
            vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True)

            in_tract = (vpix_tract_ids == tract_id)

            consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]]

        return consolidated_map