Coverage for python/lsst/pipe/tasks/healSparseMapping.py: 20%

381 statements  

« prev     ^ index     » next       coverage.py v6.4.4, created at 2022-08-18 12:37 -0700

1# 

2# LSST Data Management System 

3# Copyright 2008-2021 AURA/LSST. 

4# 

5# This product includes software developed by the 

6# LSST Project (http://www.lsst.org/). 

7# 

8# This program is free software: you can redistribute it and/or modify 

9# it under the terms of the GNU General Public License as published by 

10# the Free Software Foundation, either version 3 of the License, or 

11# (at your option) any later version. 

12# 

13# This program is distributed in the hope that it will be useful, 

14# but WITHOUT ANY WARRANTY; without even the implied warranty of 

15# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

16# GNU General Public License for more details. 

17# 

18# You should have received a copy of the LSST License Statement and 

19# the GNU General Public License along with this program. If not, 

20# see <http://www.lsstcorp.org/LegalNotices/>. 

21# 

22from collections import defaultdict 

23import warnings 

24import numbers 

25import numpy as np 

26import hpgeom as hpg 

27import healsparse as hsp 

28 

29import lsst.pex.config as pexConfig 

30import lsst.pipe.base as pipeBase 

31import lsst.geom 

32import lsst.afw.geom as afwGeom 

33from lsst.daf.butler import Formatter 

34from lsst.skymap import BaseSkyMap 

35from lsst.utils.timer import timeMethod 

36from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig, 

37 PropertyMapMap, compute_approx_psf_size_and_shape) 

38 

39 

40__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig", 

41 "HealSparseMapFormatter", "HealSparsePropertyMapConnections", 

42 "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask", 

43 "ConsolidateHealSparsePropertyMapConnections", 

44 "ConsolidateHealSparsePropertyMapConfig", 

45 "ConsolidateHealSparsePropertyMapTask"] 

46 

47 

class HealSparseMapFormatter(Formatter):
    """Butler formatter that reads and writes healsparse.HealSparseMap files."""
    unsupportedParameters = frozenset()
    supportedExtensions = frozenset({".hsp", ".fit", ".fits"})
    extension = '.hsp'

    def read(self, component=None):
        # Docstring inherited from Formatter.read.
        path = self.fileDescriptor.location.path

        # The ``coverage`` component uses the lightweight coverage-map
        # reader instead of loading the full sparse map.
        if component == 'coverage':
            try:
                return hsp.HealSparseCoverage.read(path)
            except (OSError, RuntimeError):
                raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

        # Optional read parameters: a subset of coverage pixels to load
        # and/or an nside to degrade the map to on read.
        params = self.fileDescriptor.parameters
        if params is None:
            pixels = None
            degrade_nside = None
        else:
            pixels = params.get('pixels', None)
            degrade_nside = params.get('degrade_nside', None)

        try:
            return hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
        except (OSError, RuntimeError):
            raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

    def write(self, inMemoryDataset):
        # Docstring inherited from Formatter.write.
        # Force the formatter-preferred file extension before writing.
        self.fileDescriptor.location.updateExtension(self.extension)
        inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True)

84 

85 

86def _is_power_of_two(value): 

87 """Check that value is a power of two. 

88 

89 Parameters 

90 ---------- 

91 value : `int` 

92 Value to check. 

93 

94 Returns 

95 ------- 

96 is_power_of_two : `bool` 

97 True if value is a power of two; False otherwise, or 

98 if value is not an integer. 

99 """ 

100 if not isinstance(value, numbers.Integral): 

101 return False 

102 

103 # See https://stackoverflow.com/questions/57025836 

104 # Every power of 2 has exactly 1 bit set to 1; subtracting 

105 # 1 therefore flips every preceding bit. If you and that 

106 # together with the original value it must be 0. 

107 return (value & (value - 1) == 0) and value != 0 

108 

109 

class HealSparseInputMapConfig(pexConfig.Config):
    """Configuration parameters for HealSparseInputMapTask"""
    # Resolution (healpix nside) of the sparse input map itself; validated
    # to be a power of 2 by _is_power_of_two.
    nside = pexConfig.Field(
        doc="Mapping healpix nside. Must be power of 2.",
        dtype=int,
        default=32768,
        check=_is_power_of_two,
    )
    # Resolution of the healsparse coverage (index) map; coarser than nside.
    nside_coverage = pexConfig.Field(
        doc="HealSparse coverage map nside. Must be power of 2.",
        dtype=int,
        default=256,
        check=_is_power_of_two,
    )
    # Fraction of a map pixel's area that must be flagged bad before the
    # pixel is cleared from the input map (see finalize_ccd_input_map_mask).
    bad_mask_min_coverage = pexConfig.Field(
        doc=("Minimum area fraction of a map healpixel pixel that must be "
             "covered by bad pixels to be removed from the input map. "
             "This is approximate."),
        dtype=float,
        default=0.5,
    )

131 

132 

class HealSparseInputMapTask(pipeBase.Task):
    """Task for making a HealSparse input map.

    The input map is a healsparse wide-mask map with one bit per input
    visit/ccd, built by build_ccd_input_map() and then refined by
    mask_warp_bbox() / finalize_ccd_input_map_mask().
    """

    ConfigClass = HealSparseInputMapConfig
    _DefaultName = "healSparseInputMap"

    def __init__(self, **kwargs):
        pipeBase.Task.__init__(self, **kwargs)

        # Wide-mask map of per-ccd coverage; None until build_ccd_input_map()
        # has been run.
        self.ccd_input_map = None

    def build_ccd_input_map(self, bbox, wcs, ccds):
        """Build a map from ccd valid polygons or bounding boxes.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box for region to build input map.
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object for region to build input map.
        ccds : `lsst.afw.table.ExposureCatalog`
            Exposure catalog with ccd data from coadd inputs.
        """
        with warnings.catch_warnings():
            # Healsparse will emit a warning if nside coverage is greater than
            # 128. In the case of generating patch input maps, and not global
            # maps, high nside coverage works fine, so we can suppress this
            # warning.
            warnings.simplefilter("ignore")
            # One wide-mask bit per input ccd row.
            self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
                                                              nside_sparse=self.config.nside,
                                                              dtype=hsp.WIDE_MASK,
                                                              wide_mask_maxbits=len(ccds))
        self._wcs = wcs
        self._bbox = bbox
        self._ccds = ccds

        # Convert the configured bad-area fraction into a threshold count of
        # bad image pixels per healpix map pixel.
        pixel_scale = wcs.getPixelScale().asArcseconds()
        hpix_area_arcsec2 = hpg.nside_to_pixel_area(self.config.nside, degrees=True)*(3600.**2.)
        self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)

        # Record the ccd/visit/weight associated with each bit in the map
        # metadata, and build bit lookup tables keyed by (visit, ccd) and
        # by visit.
        metadata = {}
        self._bits_per_visit_ccd = {}
        self._bits_per_visit = defaultdict(list)
        for bit, ccd_row in enumerate(ccds):
            metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
            metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
            metadata[f"B{bit:04d}WT"] = ccd_row["weight"]

            self._bits_per_visit_ccd[(ccd_row["visit"], ccd_row["ccd"])] = bit
            self._bits_per_visit[ccd_row["visit"]].append(bit)

            # Fall back to the detector bounding box when no valid polygon
            # is attached to the ccd row.
            ccd_poly = ccd_row.getValidPolygon()
            if ccd_poly is None:
                ccd_poly = afwGeom.Polygon(lsst.geom.Box2D(ccd_row.getBBox()))
            # Detectors need to be rendered with their own wcs.
            ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())

            # Create a ccd healsparse polygon
            # ([: -1] drops the repeated closing vertex of the hull).
            poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
                               dec=ccd_poly_radec[: -1, 1],
                               value=[bit])
            self.ccd_input_map.set_bits_pix(poly.get_pixels(nside=self.ccd_input_map.nside_sparse),
                                            [bit])

        # Cut down to the overall bounding box with associated wcs.
        bbox_afw_poly = afwGeom.Polygon(lsst.geom.Box2D(bbox))
        bbox_poly_radec = self._pixels_to_radec(self._wcs,
                                                bbox_afw_poly.convexHull().getVertices())
        bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
                                value=np.arange(self.ccd_input_map.wide_mask_maxbits))
        bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)
        self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map])
        self.ccd_input_map.metadata = metadata

        # Create a temporary map to hold the count of bad pixels in each healpix pixel
        self._ccd_input_pixels = self.ccd_input_map.valid_pixels

        # One int64 counter column per visit (field names are "v<visit>").
        dtype = [(f"v{visit}", np.int64) for visit in self._bits_per_visit.keys()]

        with warnings.catch_warnings():
            # Healsparse will emit a warning if nside coverage is greater than
            # 128. In the case of generating patch input maps, and not global
            # maps, high nside coverage works fine, so we can suppress this
            # warning.
            warnings.simplefilter("ignore")
            self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty(
                nside_coverage=self.config.nside_coverage,
                nside_sparse=self.config.nside,
                dtype=dtype,
                primary=dtype[0][0])

        # Don't set input bad map if there are no ccds which overlap the bbox.
        if len(self._ccd_input_pixels) > 0:
            self._ccd_input_bad_count_map[self._ccd_input_pixels] = np.zeros(1, dtype=dtype)

    def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value):
        """Mask a subregion from a visit.
        This must be run after build_ccd_input_map initializes
        the overall map.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box from region to mask.
        visit : `int`
            Visit number corresponding to warp with mask.
        mask : `lsst.afw.image.MaskX`
            Mask plane from warp exposure.
        bit_mask_value : `int`
            Bit mask to check for bad pixels.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox")

        # Find the bad pixels and convert to healpix
        bad_pixels = np.where(mask.array & bit_mask_value)
        if len(bad_pixels[0]) == 0:
            # No bad pixels
            return

        # Bad pixels come from warps which use the overall wcs.
        # (np.where returns (row, col) == (y, x), hence the index order.)
        bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
                                                    bad_pixels[0].astype(np.float64),
                                                    degrees=True)
        bad_hpix = hpg.angle_to_pixel(self.config.nside, bad_ra, bad_dec)

        # Count the number of bad image pixels in each healpix pixel
        min_bad_hpix = bad_hpix.min()
        bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
        np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)

        # Add these to the accumulator map.
        # We need to make sure that the "primary" array has valid values for
        # this pixel to be registered in the accumulator map.
        pix_to_add, = np.where(bad_hpix_count > 0)
        count_map_arr = self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add]
        primary = self._ccd_input_bad_count_map.primary
        # Clip replaces the healsparse sentinel for unset pixels with 0 so
        # the counters start from zero.
        count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)

        count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
        count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]

        self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add] = count_map_arr

    def finalize_ccd_input_map_mask(self):
        """Use accumulated mask information to finalize the masking of
        ccd_input_map.

        Clears the bits of every visit whose bad-pixel count exceeds the
        threshold derived from ``bad_mask_min_coverage``.

        Raises
        ------
        RuntimeError : Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.")

        count_map_arr = self._ccd_input_bad_count_map[self._ccd_input_pixels]
        for visit in self._bits_per_visit:
            to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad)
            if to_mask.size == 0:
                continue
            self.ccd_input_map.clear_bits_pix(self._ccd_input_pixels[to_mask],
                                              self._bits_per_visit[visit])

        # Clear memory
        self._ccd_input_bad_count_map = None

    def _pixels_to_radec(self, wcs, pixels):
        """Convert pixels to ra/dec positions using a wcs.

        Parameters
        ----------
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object.
        pixels : `list` [`lsst.geom.Point2D`]
            List of pixels to convert.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions associated with pixels.
        """
        sph_pts = wcs.pixelToSky(pixels)
        return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
                         for sph in sph_pts])

322 

323 

class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                       dimensions=("tract", "band", "skymap",),
                                       defaultTemplates={"coaddName": "deep",
                                                         "calexpType": ""}):
    """Connections for per-tract property-map generation: per-patch input
    maps and coadds in, one output map per property/statistic out."""
    input_maps = pipeBase.connectionTypes.Input(
        doc="Healsparse bit-wise coadd input maps",
        name="{coaddName}Coadd_inputMap",
        storageClass="HealSparseMap",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    coadd_exposures = pipeBase.connectionTypes.Input(
        doc="Coadded exposures associated with input_maps",
        name="{coaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    visit_summaries = pipeBase.connectionTypes.Input(
        doc="Visit summary tables with aggregated statistics",
        name="{calexpType}visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create output connections for all possible maps defined in the
    # registry. The vars() trick used here allows us to set class attributes
    # programmatically. Taken from
    # https://stackoverflow.com/questions/2519807/
    # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Here we remove the unused connections.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                # Unconfigured map: build a default config with every
                # statistic disabled so all of its outputs get removed.
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.outputs.remove(f"{name}_map_min")
            if not prop_config.do_max:
                self.outputs.remove(f"{name}_map_max")
            if not prop_config.do_mean:
                self.outputs.remove(f"{name}_map_mean")
            if not prop_config.do_weighted_mean:
                self.outputs.remove(f"{name}_map_weighted_mean")
            if not prop_config.do_sum:
                self.outputs.remove(f"{name}_map_sum")

422 

423 

class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                  pipelineConnections=HealSparsePropertyMapConnections):
    """Configuration parameters for HealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        # Exposure time accumulates as a sum; every other default property
        # map accumulates a weighted mean.
        self.property_maps["exposure_time"].do_sum = True
        for map_name in ("psf_size", "psf_e1", "psf_e2", "psf_maglim",
                         "sky_noise", "sky_background", "dcr_dra", "dcr_ddec",
                         "dcr_e1", "dcr_e2"):
            self.property_maps[map_name].do_weighted_mean = True

455 

456 

class HealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to compute Healsparse property maps.

    This task will compute individual property maps (per tract, per
    map type, per band). These maps cover the full coadd tract, and
    are not truncated to the inner tract region.
    """
    ConfigClass = HealSparsePropertyMapConfig
    _DefaultName = "healSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # One property-map calculator instance per configured map name.
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        tract = butlerQC.quantum.dataId["tract"]
        band = butlerQC.quantum.dataId["band"]

        # Index deferred handles by patch so run() can fetch them lazily.
        input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
        coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}

        # Visit summaries are small; load them eagerly, keyed by visit.
        visit_summary_dict = {ref.dataId["visit"]: ref.get()
                              for ref in inputs["visit_summaries"]}

        self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)

        # Write the outputs
        for name, property_map in self.property_maps.items():
            if property_map.config.do_min:
                butlerQC.put(property_map.min_map,
                             getattr(outputRefs, f"{name}_map_min"))
            if property_map.config.do_max:
                butlerQC.put(property_map.max_map,
                             getattr(outputRefs, f"{name}_map_max"))
            if property_map.config.do_mean:
                butlerQC.put(property_map.mean_map,
                             getattr(outputRefs, f"{name}_map_mean"))
            if property_map.config.do_weighted_mean:
                butlerQC.put(property_map.weighted_mean_map,
                             getattr(outputRefs, f"{name}_map_weighted_mean"))
            if property_map.config.do_sum:
                butlerQC.put(property_map.sum_map,
                             getattr(outputRefs, f"{name}_map_sum"))

    def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
        """Run the healsparse property task.

        Parameters
        ----------
        sky_map : Sky map object
        tract : `int`
            Tract number.
        band : `str`
            Band name for logging.
        coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of coadd exposure references. Keys are patch numbers.
        input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of input map references. Keys are patch numbers.
        visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
            Dictionary of visit summary tables. Keys are visit numbers.

        Raises
        ------
        RepeatableQuantumError
            If visit_summary_dict is missing any visits or detectors found in an
            input map. This leads to an inconsistency between what is in the coadd
            (via the input map) and the visit summary tables which contain data
            to compute the maps.
        """
        tract_info = sky_map[tract]

        tract_maps_initialized = False

        for patch in input_map_dict.keys():
            self.log.info("Making maps for band %s, tract %d, patch %d.",
                          band, tract, patch)

            patch_info = tract_info[patch]

            input_map = input_map_dict[patch].get()
            coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
            coadd_inputs = coadd_dict[patch].get(component="coaddInputs")

            coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())

            # Crop input_map to the inner polygon of the patch
            poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
            patch_radec = self._vertices_to_radec(poly_vertices)
            patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
                                     value=np.arange(input_map.wide_mask_maxbits))
            patch_poly_map = patch_poly.get_map_like(input_map)
            input_map = hsp.and_intersection([input_map, patch_poly_map])

            if not tract_maps_initialized:
                # We use the first input map nside information to initialize
                # the tract maps
                nside_coverage = self._compute_nside_coverage_tract(tract_info)
                nside = input_map.nside_sparse

                do_compute_approx_psf = False
                # Initialize the tract maps
                for property_map in self.property_maps:
                    property_map.initialize_tract_maps(nside_coverage, nside)
                    if property_map.requires_psf:
                        do_compute_approx_psf = True

                tract_maps_initialized = True

            valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)

            # Check if there are no valid pixels for the inner (unique) patch region
            if valid_pixels.size == 0:
                continue

            # Initialize the value accumulators
            for property_map in self.property_maps:
                property_map.initialize_values(valid_pixels.size)
                property_map.zeropoint = coadd_zeropoint

            # Initialize the weight and counter accumulators
            total_weights = np.zeros(valid_pixels.size)
            total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)

            for bit, ccd_row in enumerate(coadd_inputs.ccds):
                # Which pixels in the map are used by this visit/detector
                inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))

                # Check if there are any valid pixels in the map from this detector.
                if inmap.size == 0:
                    continue

                # visit, detector_id, weight = input_dict[bit]
                visit = ccd_row["visit"]
                detector_id = ccd_row["ccd"]
                weight = ccd_row["weight"]

                # Project the map pixel positions into the detector frame to
                # evaluate positionally-varying calibrations.
                x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
                scalings = self._compute_calib_scale(ccd_row, x, y)

                if do_compute_approx_psf:
                    psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap])
                else:
                    psf_array = None

                total_weights[inmap] += weight
                total_inputs[inmap] += 1

                # Retrieve the correct visitSummary row
                if visit not in visit_summary_dict:
                    msg = f"Visit {visit} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)
                row = visit_summary_dict[visit].find(detector_id)
                if row is None:
                    msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)

                # Accumulate the values
                for property_map in self.property_maps:
                    property_map.accumulate_values(inmap,
                                                   vpix_ra[inmap],
                                                   vpix_dec[inmap],
                                                   weight,
                                                   scalings,
                                                   row,
                                                   psf_array=psf_array)

            # Finalize the mean values and set the tract maps
            for property_map in self.property_maps:
                property_map.finalize_mean_values(total_weights, total_inputs)
                property_map.set_map_values(valid_pixels)

    def _compute_calib_scale(self, ccd_row, x, y):
        """Compute calibration scaling values.

        Parameters
        ----------
        ccd_row : `lsst.afw.table.ExposureRecord`
            Exposure metadata for a given detector exposure.
        x : `np.ndarray`
            Array of x positions.
        y : `np.ndarray`
            Array of y positions.

        Returns
        -------
        calib_scale : `np.ndarray`
            Array of calibration scale values.
        """
        photo_calib = ccd_row.getPhotoCalib()
        bf = photo_calib.computeScaledCalibration()
        if bf.getBBox() == ccd_row.getBBox():
            # Track variable calibration over the detector
            calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
        else:
            # Spatially constant calibration
            calib_scale = photo_calib.getCalibrationMean()

        return calib_scale

    def _vertices_to_radec(self, vertices):
        """Convert polygon vertices to ra/dec.

        Parameters
        ----------
        vertices : `list` [ `lsst.sphgeom.UnitVector3d` ]
            Vertices for bounding polygon.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions (in degrees) associated with vertices.
        """
        # NOTE(review): lsst.sphgeom is not imported explicitly at the top of
        # this file; this relies on it being importable as an attribute of the
        # ``lsst`` package via the other lsst imports -- confirm.
        lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
        radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
                          x in lonlats])
        return radec

    def _compute_nside_coverage_tract(self, tract_info):
        """Compute the optimal coverage nside for a tract.

        Parameters
        ----------
        tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo`
            Tract information object.

        Returns
        -------
        nside_coverage : `int`
            Optimal coverage nside for a tract map.
        """
        num_patches = tract_info.getNumPatches()

        # Compute approximate patch area
        patch_info = tract_info.getPatchInfo(0)
        vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
        radec = self._vertices_to_radec(vertices)
        delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
        delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
        # Flat-sky area estimate with a cos(dec) correction.
        patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))

        tract_area = num_patches[0]*num_patches[1]*patch_area
        # Start with a fairly low nside and increase until we find the approximate area.
        nside_coverage_tract = 32
        while hpg.nside_to_pixel_area(nside_coverage_tract, degrees=True) > tract_area:
            nside_coverage_tract = 2*nside_coverage_tract
        # Step back one, but don't go bigger pixels than nside=32 or smaller
        # than 128 (recommended by healsparse).
        nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, 128))

        return nside_coverage_tract

714 

715 

class ConsolidateHealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                                  dimensions=("band", "skymap",),
                                                  defaultTemplates={"coaddName": "deep"}):
    """Connections for consolidating per-tract property maps into a single
    map per property/statistic over the full sky map."""
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create input/output connections for all possible maps defined in the
    # registry. The vars() trick used here allows us to set class attributes
    # programmatically. Taken from
    # https://stackoverflow.com/questions/2519807/
    # setting-a-class-attribute-with-a-given-name-in-python-while-defining-the-class
    #
    # Fixed: the consolidated Output docs previously all read
    # "Minumum-value map of ..." (misspelled, and wrong for the max/mean/
    # weighted-mean/sum outputs); each now describes its own statistic.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Input(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_min",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Input(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_max",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Input(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Input(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
            multiple=True,
            deferLoad=True,
        )
        vars()[f"{name}_consolidated_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_consolidated_map_sum",
            storageClass="HealSparseMap",
            dimensions=("skymap", "band"),
        )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Not all possible maps in the registry will be configured to run.
        # Here we remove the unused connections.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                # Unconfigured map: disable every statistic so all of its
                # input and output connections are removed below.
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.inputs.remove(f"{name}_map_min")
                self.outputs.remove(f"{name}_consolidated_map_min")
            if not prop_config.do_max:
                self.inputs.remove(f"{name}_map_max")
                self.outputs.remove(f"{name}_consolidated_map_max")
            if not prop_config.do_mean:
                self.inputs.remove(f"{name}_map_mean")
                self.outputs.remove(f"{name}_consolidated_map_mean")
            if not prop_config.do_weighted_mean:
                self.inputs.remove(f"{name}_map_weighted_mean")
                self.outputs.remove(f"{name}_consolidated_map_weighted_mean")
            if not prop_config.do_sum:
                self.inputs.remove(f"{name}_map_sum")
                self.outputs.remove(f"{name}_consolidated_map_sum")

834 

835 

class ConsolidateHealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                             pipelineConnections=ConsolidateHealSparsePropertyMapConnections):
    """Configuration parameters for ConsolidateHealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )
    nside_coverage = pexConfig.Field(
        doc="Consolidated HealSparse coverage map nside. Must be power of 2.",
        dtype=int,
        default=32,
        check=_is_power_of_two,
    )

    def setDefaults(self):
        # Exposure time is consolidated as a sum; every other default map
        # type is consolidated as a weighted mean.
        self.property_maps["exposure_time"].do_sum = True
        for map_name in ("psf_size", "psf_e1", "psf_e2", "psf_maglim",
                         "sky_noise", "sky_background", "dcr_dra",
                         "dcr_ddec", "dcr_e1", "dcr_e2"):
            self.property_maps[map_name].do_weighted_mean = True

874 

class ConsolidateHealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to consolidate HealSparse property maps.

    This task will take all the individual tract-based maps (per map type,
    per band) and consolidate them into one survey-wide map (per map type,
    per band). Each tract map is truncated to its inner region before
    consolidation.
    """
    ConfigClass = ConsolidateHealSparsePropertyMapConfig
    _DefaultName = "consolidateHealSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Instantiate each configured property-map object, keyed by name.
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        """Consolidate and write out each configured map type/statistic.

        Parameters
        ----------
        butlerQC : `lsst.pipe.base.ButlerQuantumContext`
            Butler quantum context used to get inputs and put outputs.
        inputRefs : `lsst.pipe.base.InputQuantizedConnection`
            Input dataset references.
        outputRefs : `lsst.pipe.base.OutputQuantizedConnection`
            Output dataset references.
        """
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        # These need to be consolidated one at a time to conserve memory.
        for name in self.config.property_maps.names:
            for type_ in ['min', 'max', 'mean', 'weighted_mean', 'sum']:
                map_type = f"{name}_map_{type_}"
                # Only connections enabled in the config are present in
                # ``inputs`` (the rest were removed by the connections class).
                if map_type in inputs:
                    input_refs = {ref.dataId['tract']: ref
                                  for ref in inputs[map_type]}
                    consolidated_map = self.consolidate_map(sky_map, input_refs)
                    # Put each map as soon as it is built rather than
                    # returning a Struct, so memory can be released before
                    # the next map type is consolidated.
                    butlerQC.put(consolidated_map,
                                 getattr(outputRefs, f"{name}_consolidated_map_{type_}"))

    def consolidate_map(self, sky_map, input_refs):
        """Consolidate the healsparse property maps.

        Parameters
        ----------
        sky_map : Sky map object
        input_refs : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of tract_id mapping to dataref.

        Returns
        -------
        consolidated_map : `healsparse.HealSparseMap`
            Consolidated HealSparse map.
        """
        # First, we read in the coverage maps to know how much memory
        # to allocate
        cov_mask = None
        nside_coverage_inputs = None
        for tract_id in input_refs:
            # Deferred read of just the coverage component (cheap compared
            # to reading the full sparse map).
            cov = input_refs[tract_id].get(component='coverage')
            if cov_mask is None:
                cov_mask = cov.coverage_mask
                # NOTE(review): only the first input's nside_coverage is
                # recorded; this assumes all tract maps share the same
                # coverage resolution — confirm upstream guarantees this.
                nside_coverage_inputs = cov.nside_coverage
            else:
                # Union of coverage over all tracts.
                cov_mask |= cov.coverage_mask

        cov_pix_inputs, = np.where(cov_mask)

        # Compute the coverage pixels for the desired nside_coverage
        if nside_coverage_inputs == self.config.nside_coverage:
            cov_pix = cov_pix_inputs
        elif nside_coverage_inputs > self.config.nside_coverage:
            # Converting from higher resolution coverage to lower
            # resolution coverage.
            # NOTE(review): uses healsparse's private _compute_bitshift
            # helper; may break on healsparse upgrades.
            bit_shift = hsp.utils._compute_bitshift(self.config.nside_coverage,
                                                    nside_coverage_inputs)
            cov_pix = np.right_shift(cov_pix_inputs, bit_shift)
        else:
            # Converting from lower resolution coverage to higher
            # resolution coverage.
            bit_shift = hsp.utils._compute_bitshift(nside_coverage_inputs,
                                                    self.config.nside_coverage)
            cov_pix = np.left_shift(cov_pix_inputs, bit_shift)

        # Now read in each tract map and build the consolidated map.
        consolidated_map = None
        for tract_id in input_refs:
            input_map = input_refs[tract_id].get()
            if consolidated_map is None:
                # Allocate the full output map once, pre-covered with the
                # union coverage pixels computed above; dtype/sentinel are
                # inherited from the first tract map.
                consolidated_map = hsp.HealSparseMap.make_empty(
                    self.config.nside_coverage,
                    input_map.nside_sparse,
                    input_map.dtype,
                    sentinel=input_map._sentinel,
                    cov_pixels=cov_pix)

            # Only use pixels that are properly inside the tract.
            vpix, ra, dec = input_map.valid_pixels_pos(return_pixels=True)
            vpix_tract_ids = sky_map.findTractIdArray(ra, dec, degrees=True)

            # Truncate each tract map to its inner region: keep only pixels
            # whose position maps back to this tract, so overlap regions are
            # written exactly once.
            in_tract = (vpix_tract_ids == tract_id)

            consolidated_map[vpix[in_tract]] = input_map[vpix[in_tract]]

        return consolidated_map