#
# LSST Data Management System
# Copyright 2008-2021 AURA/LSST.
#
# This product includes software developed by the
# LSST Project (http://www.lsst.org/).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the LSST License Statement and
# the GNU General Public License along with this program.  If not,
# see <http://www.lsstcorp.org/LegalNotices/>.
#
from collections import defaultdict
import numbers
import numpy as np
import healpy as hp
import healsparse as hsp

import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
import lsst.geom
import lsst.afw.geom as afwGeom
import lsst.sphgeom
from lsst.daf.butler import Formatter
from lsst.skymap import BaseSkyMap
from .healSparseMappingProperties import (BasePropertyMap, BasePropertyMapConfig,
                                          PropertyMapMap, compute_approx_psf_size_and_shape)


__all__ = ["HealSparseInputMapTask", "HealSparseInputMapConfig",
           "HealSparseMapFormatter", "HealSparsePropertyMapConnections",
           "HealSparsePropertyMapConfig", "HealSparsePropertyMapTask"]


class HealSparseMapFormatter(Formatter):
    """Interface for reading and writing healsparse.HealSparseMap files."""
    unsupportedParameters = frozenset()
    supportedExtensions = frozenset({".hsp", ".fit", ".fits"})
    extension = '.hsp'

    def read(self, component=None):
        # Docstring inherited from Formatter.read.
        path = self.fileDescriptor.location.path

        if component == 'coverage':
            try:
                data = hsp.HealSparseCoverage.read(path)
            except (OSError, RuntimeError):
                raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

            return data

        if self.fileDescriptor.parameters is None:
            pixels = None
            degrade_nside = None
        else:
            pixels = self.fileDescriptor.parameters.get('pixels', None)
            degrade_nside = self.fileDescriptor.parameters.get('degrade_nside', None)
        try:
            data = hsp.HealSparseMap.read(path, pixels=pixels, degrade_nside=degrade_nside)
        except (OSError, RuntimeError):
            raise ValueError(f"Unable to read healsparse map with URI {self.fileDescriptor.location.uri}")

        return data

    def write(self, inMemoryDataset):
        # Docstring inherited from Formatter.write.
        # Update the location with the formatter-preferred file extension
        self.fileDescriptor.location.updateExtension(self.extension)
        inMemoryDataset.write(self.fileDescriptor.location.path, clobber=True)
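
    # An illustrative sketch of how this formatter is typically exercised
    # through the butler; the dataset type and data ID below are examples,
    # not requirements of the formatter itself:
    #
    #     coverage = butler.get("deepCoadd_inputMap.coverage", dataId=dataId)
    #     submap = butler.get("deepCoadd_inputMap", dataId=dataId,
    #                         parameters={"degrade_nside": 4096})
    #
    # The "coverage" component and the "pixels"/"degrade_nside" parameters
    # correspond to the branches handled in read() above.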


def _is_power_of_two(value):
    """Check that value is a power of two.

    Parameters
    ----------
    value : `int`
        Value to check.

    Returns
    -------
    is_power_of_two : `bool`
        True if value is a power of two; False otherwise, or
        if value is not an integer.
    """
    if not isinstance(value, numbers.Integral):
        return False

    # See https://stackoverflow.com/questions/57025836
    # Every power of 2 has exactly 1 bit set to 1; subtracting
    # 1 therefore flips every bit below it.  ANDing that with
    # the original value must give 0.
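    # For example, 256 (0b100000000) & 255 (0b011111111) == 0, whereas
    # 33 (0b100001) & 32 (0b100000) == 32, so 33 is rejected.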

    return (value & (value - 1) == 0) and value != 0


class HealSparseInputMapConfig(pexConfig.Config):
    """Configuration parameters for HealSparseInputMapTask"""
    nside = pexConfig.Field(
        doc="Mapping healpix nside.  Must be power of 2.",
        dtype=int,
        default=32768,
        check=_is_power_of_two,
    )
    nside_coverage = pexConfig.Field(
        doc="HealSparse coverage map nside.  Must be power of 2.",
        dtype=int,
        default=256,
        check=_is_power_of_two,
    )
    bad_mask_min_coverage = pexConfig.Field(
        doc=("Minimum area fraction of a map healpix pixel that must be "
             "covered by bad pixels to be removed from the input map. "
             "This is approximate."),
        dtype=float,
        default=0.5,
    )
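
# An illustrative configuration override (the subtask attribute name
# "inputMapTask" is hypothetical and depends on where this task is attached):
#
#     config.inputMapTask.nside = 16384
#     config.inputMapTask.bad_mask_min_coverage = 0.25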


class HealSparseInputMapTask(pipeBase.Task):
    """Task for making a HealSparse input map."""

    ConfigClass = HealSparseInputMapConfig
    _DefaultName = "healSparseInputMap"

    def __init__(self, **kwargs):
        pipeBase.Task.__init__(self, **kwargs)

        self.ccd_input_map = None
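
    # The calling sequence implied by the methods below is:
    #   1. build_ccd_input_map(bbox, wcs, ccds) to initialize the bit-wise map;
    #   2. mask_warp_bbox(...) once per warp that has bad pixels, to
    #      accumulate bad-pixel counts;
    #   3. finalize_ccd_input_map_mask() to clear bits where the bad-pixel
    #      coverage exceeds bad_mask_min_coverage.
    # Afterward, self.ccd_input_map holds the finished input map.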

    def build_ccd_input_map(self, bbox, wcs, ccds):
        """Build a map from ccd valid polygons or bounding boxes.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box for region to build input map.
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object for region to build input map.
        ccds : `lsst.afw.table.ExposureCatalog`
            Exposure catalog with ccd data from coadd inputs.
        """
        self.ccd_input_map = hsp.HealSparseMap.make_empty(nside_coverage=self.config.nside_coverage,
                                                          nside_sparse=self.config.nside,
                                                          dtype=hsp.WIDE_MASK,
                                                          wide_mask_maxbits=len(ccds))
        self._wcs = wcs
        self._bbox = bbox
        self._ccds = ccds

        pixel_scale = wcs.getPixelScale().asArcseconds()
        hpix_area_arcsec2 = hp.nside2pixarea(self.config.nside, degrees=True)*(3600.**2.)
        self._min_bad = self.config.bad_mask_min_coverage*hpix_area_arcsec2/(pixel_scale**2.)
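        # For example (illustrative numbers): with the default nside=32768 a
        # healpix pixel covers ~41.5 arcsec**2; at an assumed pixel scale of
        # 0.2 arcsec/pixel that is ~1040 image pixels, so the default
        # bad_mask_min_coverage=0.5 gives _min_bad of roughly 520 pixels.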

        metadata = {}
        self._bits_per_visit_ccd = {}
        self._bits_per_visit = defaultdict(list)
        for bit, ccd_row in enumerate(ccds):
            metadata[f"B{bit:04d}CCD"] = ccd_row["ccd"]
            metadata[f"B{bit:04d}VIS"] = ccd_row["visit"]
            metadata[f"B{bit:04d}WT"] = ccd_row["weight"]

            self._bits_per_visit_ccd[(ccd_row["visit"], ccd_row["ccd"])] = bit
            self._bits_per_visit[ccd_row["visit"]].append(bit)

            ccd_poly = ccd_row.getValidPolygon()
            if ccd_poly is None:
                ccd_poly = afwGeom.Polygon(lsst.geom.Box2D(ccd_row.getBBox()))
            # Detectors need to be rendered with their own wcs.
            ccd_poly_radec = self._pixels_to_radec(ccd_row.getWcs(), ccd_poly.convexHull().getVertices())

            # Create a ccd healsparse polygon
            poly = hsp.Polygon(ra=ccd_poly_radec[: -1, 0],
                               dec=ccd_poly_radec[: -1, 1],
                               value=[bit])
            self.ccd_input_map.set_bits_pix(poly.get_pixels(nside=self.ccd_input_map.nside_sparse),
                                            [bit])

        # Cut down to the overall bounding box with associated wcs.
        bbox_afw_poly = afwGeom.Polygon(lsst.geom.Box2D(bbox))
        bbox_poly_radec = self._pixels_to_radec(self._wcs,
                                                bbox_afw_poly.convexHull().getVertices())
        bbox_poly = hsp.Polygon(ra=bbox_poly_radec[: -1, 0], dec=bbox_poly_radec[: -1, 1],
                                value=np.arange(self.ccd_input_map.wide_mask_maxbits))
        bbox_poly_map = bbox_poly.get_map_like(self.ccd_input_map)
        self.ccd_input_map = hsp.and_intersection([self.ccd_input_map, bbox_poly_map])
        self.ccd_input_map.metadata = metadata

        # Create a temporary map to hold the count of bad pixels in each healpix pixel
        self._ccd_input_pixels = self.ccd_input_map.valid_pixels

        dtype = [(f"v{visit}", "i4") for visit in self._bits_per_visit.keys()]

        cov = self.config.nside_coverage
        ns = self.config.nside
        self._ccd_input_bad_count_map = hsp.HealSparseMap.make_empty(nside_coverage=cov,
                                                                     nside_sparse=ns,
                                                                     dtype=dtype,
                                                                     primary=dtype[0][0])
        # Don't set input bad map if there are no ccds which overlap the bbox.
        if len(self._ccd_input_pixels) > 0:
            self._ccd_input_bad_count_map[self._ccd_input_pixels] = np.zeros(1, dtype=dtype)

    def mask_warp_bbox(self, bbox, visit, mask, bit_mask_value):
        """Mask a subregion from a visit.

        This must be run after build_ccd_input_map initializes
        the overall map.

        Parameters
        ----------
        bbox : `lsst.geom.Box2I`
            Bounding box from region to mask.
        visit : `int`
            Visit number corresponding to warp with mask.
        mask : `lsst.afw.image.MaskX`
            Mask plane from warp exposure.
        bit_mask_value : `int`
            Bit mask to check for bad pixels.

        Raises
        ------
        RuntimeError
            Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before mask_warp_bbox")

        # Find the bad pixels and convert to healpix
        bad_pixels = np.where(mask.array & bit_mask_value)
        if len(bad_pixels[0]) == 0:
            # No bad pixels
            return

        # Bad pixels come from warps which use the overall wcs.
        bad_ra, bad_dec = self._wcs.pixelToSkyArray(bad_pixels[1].astype(np.float64),
                                                    bad_pixels[0].astype(np.float64),
                                                    degrees=True)
        bad_hpix = hp.ang2pix(self.config.nside, bad_ra, bad_dec,
                              lonlat=True, nest=True)

        # Count the number of bad image pixels in each healpix pixel
        min_bad_hpix = bad_hpix.min()
        bad_hpix_count = np.zeros(bad_hpix.max() - min_bad_hpix + 1, dtype=np.int32)
        np.add.at(bad_hpix_count, bad_hpix - min_bad_hpix, 1)
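        # For example, bad_hpix = [12, 12, 14] gives min_bad_hpix = 12 and
        # bad_hpix_count = [2, 0, 1] for healpix pixels 12, 13, and 14.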

        # Add these to the accumulator map.
        # We need to make sure that the "primary" array has valid values for
        # this pixel to be registered in the accumulator map.
        pix_to_add, = np.where(bad_hpix_count > 0)
        count_map_arr = self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add]
        primary = self._ccd_input_bad_count_map.primary
        count_map_arr[primary] = np.clip(count_map_arr[primary], 0, None)

        count_map_arr[f"v{visit}"] = np.clip(count_map_arr[f"v{visit}"], 0, None)
        count_map_arr[f"v{visit}"] += bad_hpix_count[pix_to_add]

        self._ccd_input_bad_count_map[min_bad_hpix + pix_to_add] = count_map_arr

    def finalize_ccd_input_map_mask(self):
        """Use accumulated mask information to finalize the masking of
        ccd_input_map.

        Raises
        ------
        RuntimeError
            Raised if build_ccd_input_map was not run first.
        """
        if self.ccd_input_map is None:
            raise RuntimeError("Must run build_ccd_input_map before finalize_ccd_input_map_mask.")

        count_map_arr = self._ccd_input_bad_count_map[self._ccd_input_pixels]
        for visit in self._bits_per_visit:
            to_mask, = np.where(count_map_arr[f"v{visit}"] > self._min_bad)
            if to_mask.size == 0:
                continue
            self.ccd_input_map.clear_bits_pix(self._ccd_input_pixels[to_mask],
                                              self._bits_per_visit[visit])

        # Clear memory
        self._ccd_input_bad_count_map = None

    def _pixels_to_radec(self, wcs, pixels):
        """Convert pixels to ra/dec positions using a wcs.

        Parameters
        ----------
        wcs : `lsst.afw.geom.SkyWcs`
            WCS object.
        pixels : `list` [`lsst.geom.Point2D`]
            List of pixels to convert.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions associated with pixels.
        """
        sph_pts = wcs.pixelToSky(pixels)
        return np.array([(sph.getRa().asDegrees(), sph.getDec().asDegrees())
                         for sph in sph_pts])


class HealSparsePropertyMapConnections(pipeBase.PipelineTaskConnections,
                                       dimensions=("tract", "band", "skymap",),
                                       defaultTemplates={"coaddName": "deep"}):
    input_maps = pipeBase.connectionTypes.Input(
        doc="Healsparse bit-wise coadd input maps",
        name="{coaddName}Coadd_inputMap",
        storageClass="HealSparseMap",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    coadd_exposures = pipeBase.connectionTypes.Input(
        doc="Coadded exposures associated with input_maps",
        name="{coaddName}Coadd",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    visit_summaries = pipeBase.connectionTypes.Input(
        doc="Visit summary tables with aggregated statistics",
        name="visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    sky_map = pipeBase.connectionTypes.Input(
        doc="Input definition of geometry/bbox and projection/wcs for coadded exposures",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )

    # Create output connections for all possible maps defined in the
    # registry.
    for name in BasePropertyMap.registry:
        vars()[f"{name}_map_min"] = pipeBase.connectionTypes.Output(
            doc=f"Minimum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_min",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_max"] = pipeBase.connectionTypes.Output(
            doc=f"Maximum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_max",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_weighted_mean"] = pipeBase.connectionTypes.Output(
            doc=f"Weighted mean-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_weighted_mean",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
        vars()[f"{name}_map_sum"] = pipeBase.connectionTypes.Output(
            doc=f"Sum-value map of {name}",
            name=f"{{coaddName}}Coadd_{name}_map_sum",
            storageClass="HealSparseMap",
            dimensions=("tract", "skymap", "band"),
        )
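
    # For example, a property registered as "psf_size" with the default
    # "deep" coadd template produces output dataset types named
    # "deepCoadd_psf_size_map_min", "deepCoadd_psf_size_map_max",
    # "deepCoadd_psf_size_map_mean", "deepCoadd_psf_size_map_weighted_mean",
    # and "deepCoadd_psf_size_map_sum"; __init__ below removes the ones that
    # the configuration does not enable.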

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Remove connections for properties that are not configured.
        for name in BasePropertyMap.registry:
            if name not in config.property_maps:
                prop_config = BasePropertyMapConfig()
                prop_config.do_min = False
                prop_config.do_max = False
                prop_config.do_mean = False
                prop_config.do_weighted_mean = False
                prop_config.do_sum = False
            else:
                prop_config = config.property_maps[name]

            if not prop_config.do_min:
                self.outputs.remove(f"{name}_map_min")
            if not prop_config.do_max:
                self.outputs.remove(f"{name}_map_max")
            if not prop_config.do_mean:
                self.outputs.remove(f"{name}_map_mean")
            if not prop_config.do_weighted_mean:
                self.outputs.remove(f"{name}_map_weighted_mean")
            if not prop_config.do_sum:
                self.outputs.remove(f"{name}_map_sum")


class HealSparsePropertyMapConfig(pipeBase.PipelineTaskConfig,
                                  pipelineConnections=HealSparsePropertyMapConnections):
    """Configuration parameters for HealSparsePropertyMapTask"""
    property_maps = BasePropertyMap.registry.makeField(
        multi=True,
        default=["exposure_time",
                 "psf_size",
                 "psf_e1",
                 "psf_e2",
                 "psf_maglim",
                 "sky_noise",
                 "sky_background",
                 "dcr_dra",
                 "dcr_ddec",
                 "dcr_e1",
                 "dcr_e2"],
        doc="Property map computation objects",
    )

    def setDefaults(self):
        self.property_maps["exposure_time"].do_sum = True
        self.property_maps["psf_size"].do_weighted_mean = True
        self.property_maps["psf_e1"].do_weighted_mean = True
        self.property_maps["psf_e2"].do_weighted_mean = True
        self.property_maps["psf_maglim"].do_weighted_mean = True
        self.property_maps["sky_noise"].do_weighted_mean = True
        self.property_maps["sky_background"].do_weighted_mean = True
        self.property_maps["dcr_dra"].do_weighted_mean = True
        self.property_maps["dcr_ddec"].do_weighted_mean = True
        self.property_maps["dcr_e1"].do_weighted_mean = True
        self.property_maps["dcr_e2"].do_weighted_mean = True
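
    # An illustrative (hypothetical) configuration override that enables an
    # additional statistic on top of these defaults:
    #
    #     config.property_maps["exposure_time"].do_mean = True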


class HealSparsePropertyMapTask(pipeBase.PipelineTask):
    """Task to compute Healsparse property maps."""
    ConfigClass = HealSparsePropertyMapConfig
    _DefaultName = "healSparsePropertyMapTask"

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.property_maps = PropertyMapMap()
        for name, config, PropertyMapClass in self.config.property_maps.apply():
            self.property_maps[name] = PropertyMapClass(config, name)

    @pipeBase.timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        sky_map = inputs.pop("sky_map")

        tract = butlerQC.quantum.dataId["tract"]
        band = butlerQC.quantum.dataId["band"]

        input_map_dict = {ref.dataId["patch"]: ref for ref in inputs["input_maps"]}
        coadd_dict = {ref.dataId["patch"]: ref for ref in inputs["coadd_exposures"]}

        visit_summary_dict = {ref.dataId["visit"]: ref.get()
                              for ref in inputs["visit_summaries"]}

        self.run(sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict)

        # Write the outputs
        for name, property_map in self.property_maps.items():
            if property_map.config.do_min:
                butlerQC.put(property_map.min_map,
                             getattr(outputRefs, f"{name}_map_min"))
            if property_map.config.do_max:
                butlerQC.put(property_map.max_map,
                             getattr(outputRefs, f"{name}_map_max"))
            if property_map.config.do_mean:
                butlerQC.put(property_map.mean_map,
                             getattr(outputRefs, f"{name}_map_mean"))
            if property_map.config.do_weighted_mean:
                butlerQC.put(property_map.weighted_mean_map,
                             getattr(outputRefs, f"{name}_map_weighted_mean"))
            if property_map.config.do_sum:
                butlerQC.put(property_map.sum_map,
                             getattr(outputRefs, f"{name}_map_sum"))

    def run(self, sky_map, tract, band, coadd_dict, input_map_dict, visit_summary_dict):
        """Run the healsparse property task.

        Parameters
        ----------
        sky_map : `lsst.skymap.BaseSkyMap`
            Sky map object.
        tract : `int`
            Tract number.
        band : `str`
            Band name for logging.
        coadd_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of coadd exposure references.  Keys are patch numbers.
        input_map_dict : `dict` [`int`: `lsst.daf.butler.DeferredDatasetHandle`]
            Dictionary of input map references.  Keys are patch numbers.
        visit_summary_dict : `dict` [`int`: `lsst.afw.table.ExposureCatalog`]
            Dictionary of visit summary tables.  Keys are visit numbers.

        Raises
        ------
        RepeatableQuantumError
            If visit_summary_dict is missing any visits or detectors found in an
            input map.  This leads to an inconsistency between what is in the coadd
            (via the input map) and the visit summary tables which contain data
            to compute the maps.
        """
        tract_info = sky_map[tract]

        tract_maps_initialized = False

        for patch in input_map_dict.keys():
            self.log.info("Making maps for band %s, tract %d, patch %d.",
                          band, tract, patch)

            patch_info = tract_info[patch]

            input_map = input_map_dict[patch].get()
            coadd_photo_calib = coadd_dict[patch].get(component="photoCalib")
            coadd_inputs = coadd_dict[patch].get(component="coaddInputs")

            coadd_zeropoint = 2.5*np.log10(coadd_photo_calib.getInstFluxAtZeroMagnitude())

            # Crop input_map to the inner polygon of the patch
            poly_vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
            patch_radec = self._vertices_to_radec(poly_vertices)
            patch_poly = hsp.Polygon(ra=patch_radec[:, 0], dec=patch_radec[:, 1],
                                     value=np.arange(input_map.wide_mask_maxbits))
            patch_poly_map = patch_poly.get_map_like(input_map)
            input_map = hsp.and_intersection([input_map, patch_poly_map])

            if not tract_maps_initialized:
                # We use the first input map nside information to initialize
                # the tract maps
                nside_coverage = self._compute_nside_coverage_tract(tract_info)
                nside = input_map.nside_sparse

                do_compute_approx_psf = False
                # Initialize the tract maps
                for property_map in self.property_maps:
                    property_map.initialize_tract_maps(nside_coverage, nside)
                    if property_map.requires_psf:
                        do_compute_approx_psf = True

                tract_maps_initialized = True

            valid_pixels, vpix_ra, vpix_dec = input_map.valid_pixels_pos(return_pixels=True)

            # Check if there are no valid pixels for the inner (unique) patch region
            if valid_pixels.size == 0:
                continue

            # Initialize the value accumulators
            for property_map in self.property_maps:
                property_map.initialize_values(valid_pixels.size)
                property_map.zeropoint = coadd_zeropoint

            # Initialize the weight and counter accumulators
            total_weights = np.zeros(valid_pixels.size)
            total_inputs = np.zeros(valid_pixels.size, dtype=np.int32)

            for bit, ccd_row in enumerate(coadd_inputs.ccds):
                # Which pixels in the map are used by this visit/detector
                inmap, = np.where(input_map.check_bits_pix(valid_pixels, [bit]))

                # Check if there are any valid pixels in the map from this detector.
                if inmap.size == 0:
                    continue

                # visit, detector_id, weight = input_dict[bit]
                visit = ccd_row["visit"]
                detector_id = ccd_row["ccd"]
                weight = ccd_row["weight"]

                x, y = ccd_row.getWcs().skyToPixelArray(vpix_ra[inmap], vpix_dec[inmap], degrees=True)
                scalings = self._compute_calib_scale(ccd_row, x, y)

                if do_compute_approx_psf:
                    psf_array = compute_approx_psf_size_and_shape(ccd_row, vpix_ra[inmap], vpix_dec[inmap])
                else:
                    psf_array = None

                total_weights[inmap] += weight
                total_inputs[inmap] += 1

                # Retrieve the correct visitSummary row
                if visit not in visit_summary_dict:
                    msg = f"Visit {visit} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)
                row = visit_summary_dict[visit].find(detector_id)
                if row is None:
                    msg = f"Visit {visit} / detector_id {detector_id} not found in visit_summaries."
                    raise pipeBase.RepeatableQuantumError(msg)

                # Accumulate the values
                for property_map in self.property_maps:
                    property_map.accumulate_values(inmap,
                                                   vpix_ra[inmap],
                                                   vpix_dec[inmap],
                                                   weight,
                                                   scalings,
                                                   row,
                                                   psf_array=psf_array)

            # Finalize the mean values and set the tract maps
            for property_map in self.property_maps:
                property_map.finalize_mean_values(total_weights, total_inputs)
                property_map.set_map_values(valid_pixels)

    def _compute_calib_scale(self, ccd_row, x, y):
        """Compute calibration scaling values.

        Parameters
        ----------
        ccd_row : `lsst.afw.table.ExposureRecord`
            Exposure metadata for a given detector exposure.
        x : `np.ndarray`
            Array of x positions.
        y : `np.ndarray`
            Array of y positions.

        Returns
        -------
        calib_scale : `np.ndarray`
            Array of calibration scale values.
        """
        photo_calib = ccd_row.getPhotoCalib()
        bf = photo_calib.computeScaledCalibration()
        if bf.getBBox() == ccd_row.getBBox():
            # Track variable calibration over the detector
            calib_scale = photo_calib.getCalibrationMean()*bf.evaluate(x, y)
        else:
            # Spatially constant calibration
            calib_scale = photo_calib.getCalibrationMean()

        return calib_scale

    def _vertices_to_radec(self, vertices):
        """Convert polygon vertices to ra/dec.

        Parameters
        ----------
        vertices : `list` [ `lsst.sphgeom.UnitVector3d` ]
            Vertices for bounding polygon.

        Returns
        -------
        radec : `numpy.ndarray`
            Nx2 array of ra/dec positions (in degrees) associated with vertices.
        """
        lonlats = [lsst.sphgeom.LonLat(x) for x in vertices]
        radec = np.array([(x.getLon().asDegrees(), x.getLat().asDegrees()) for
                          x in lonlats])
        return radec

    def _compute_nside_coverage_tract(self, tract_info):
        """Compute the optimal coverage nside for a tract.

        Parameters
        ----------
        tract_info : `lsst.skymap.tractInfo.ExplicitTractInfo`
            Tract information object.

        Returns
        -------
        nside_coverage : `int`
            Optimal coverage nside for a tract map.
        """
        num_patches = tract_info.getNumPatches()

        # Compute approximate patch area
        patch_info = tract_info.getPatchInfo(0)
        vertices = patch_info.getInnerSkyPolygon(tract_info.getWcs()).getVertices()
        radec = self._vertices_to_radec(vertices)
        delta_ra = np.max(radec[:, 0]) - np.min(radec[:, 0])
        delta_dec = np.max(radec[:, 1]) - np.min(radec[:, 1])
        patch_area = delta_ra*delta_dec*np.cos(np.deg2rad(np.mean(radec[:, 1])))

        tract_area = num_patches[0]*num_patches[1]*patch_area
        # Start with a fairly low nside and increase until we find the approximate area.
        nside_coverage_tract = 32
        while hp.nside2pixarea(nside_coverage_tract, degrees=True) > tract_area:
            nside_coverage_tract = 2*nside_coverage_tract
        # Step back one, but don't use coverage pixels bigger than nside=32.
        nside_coverage_tract = int(np.clip(nside_coverage_tract/2, 32, None))
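        # For example (illustrative numbers): a tract covering roughly 3 deg**2
        # exits the loop at nside 64 (pixel area ~0.84 deg**2), and the step
        # back returns 32 (pixel area ~3.4 deg**2), so the tract spans only a
        # few coverage pixels.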

        return nside_coverage_tract