python/lsst/pipe/tasks/hips.py

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22"""Tasks for making and manipulating HIPS images.""" 

23 

24__all__ = ["HighResolutionHipsTask", "HighResolutionHipsConfig", "HighResolutionHipsConnections", 

25 "GenerateHipsTask", "GenerateHipsConfig", "GenerateColorHipsTask", "GenerateColorHipsConfig"] 

26 

27from collections import defaultdict 

28import numpy as np 

29import argparse 

30import io 

31import sys 

32import re 

33import warnings 

34import math 

35from datetime import datetime 

36import hpgeom as hpg 

37import healsparse as hsp 

38from astropy.io import fits 

39from astropy.visualization.lupton_rgb import AsinhMapping 

40from PIL import Image 

41 

42from lsst.sphgeom import RangeSet, HealpixPixelization 

43from lsst.utils.timer import timeMethod 

44from lsst.daf.butler import Butler, DatasetRef, Quantum, SkyPixDimension 

45import lsst.pex.config as pexConfig 

46import lsst.pipe.base as pipeBase 

47import lsst.afw.geom as afwGeom 

48import lsst.afw.math as afwMath 

49import lsst.afw.image as afwImage 

50import lsst.geom as geom 

51from lsst.afw.geom import makeHpxWcs 

52from lsst.resources import ResourcePath 

53 

54 

55class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections, 

56 dimensions=("healpix9", "band"), 

57 defaultTemplates={"coaddName": "deep"}): 

58 coadd_exposure_handles = pipeBase.connectionTypes.Input( 

59 doc="Coadded exposures to convert to HIPS format.", 

60 name="{coaddName}Coadd_calexp", 

61 storageClass="ExposureF", 

62 dimensions=("tract", "patch", "skymap", "band"), 

63 multiple=True, 

64 deferLoad=True, 

65 ) 

66 hips_exposures = pipeBase.connectionTypes.Output( 

67 doc="HiPS-compatible HPX image.", 

68 name="{coaddName}Coadd_hpx", 

69 storageClass="ExposureF", 

70 dimensions=("healpix11", "band"), 

71 multiple=True, 

72 ) 

73 

74 def __init__(self, *, config=None): 

75 super().__init__(config=config) 

76 

77 quantum_order = None 

78 for dim in self.dimensions: 

79 if "healpix" in dim: 

80 if quantum_order is not None: 

81 raise ValueError("Must not specify more than one quantum healpix dimension.") 

82 quantum_order = int(dim.split("healpix")[1]) 

83 if quantum_order is None: 

84 raise ValueError("Must specify a healpix dimension in quantum dimensions.") 

85 

86 if quantum_order > config.hips_order: 

87 raise ValueError("Quantum healpix dimension order must not be greater than hips_order") 

88 

89 order = None 

90 for dim in self.hips_exposures.dimensions: 

91 if "healpix" in dim: 

92 if order is not None: 

93 raise ValueError("Must not specify more than one healpix dimension.") 

94 order = int(dim.split("healpix")[1]) 

95 if order is None: 

96 raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.") 

97 

98 if order != config.hips_order: 

99 raise ValueError("healpix dimension order must match config.hips_order.") 

100 

101 

102class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig, 

103 pipelineConnections=HighResolutionHipsConnections): 

104 """Configuration parameters for HighResolutionHipsTask. 

105 

106 Notes 

107 ----- 

108 A HiPS image covers one HEALPix cell, with the HEALPix nside equal to 

109 2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix 

110 cell, with 2**shift_order x 2**shift_order sub-pixels on a side, which 

111 defines the target resolution of the HiPS image. The IVOA recommends 

112 shift_order=9, for 2**9=512 pixels on a side. 

113 

114 Table 5 from 

115 https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf 

116 shows the relationship between hips_order, number of tiles (full 

117 sky coverage), cell size, and sub-pixel size/image resolution (with 

118 the default shift_order=9): 

119 +------------+-----------------+--------------+------------------+
120 | hips_order | Number of Tiles | Cell Size    | Image Resolution |
121 +============+=================+==============+==================+
122 | 0          | 12              | 58.63 deg    | 6.871 arcmin     |
123 | 1          | 48              | 29.32 deg    | 3.435 arcmin     |
124 | 2          | 192             | 14.66 deg    | 1.718 arcmin     |
125 | 3          | 768             | 7.329 deg    | 51.53 arcsec     |
126 | 4          | 3072            | 3.665 deg    | 25.77 arcsec     |
127 | 5          | 12288           | 1.832 deg    | 12.88 arcsec     |
128 | 6          | 49152           | 54.97 arcmin | 6.442 arcsec     |
129 | 7          | 196608          | 27.48 arcmin | 3.221 arcsec     |
130 | 8          | 786432          | 13.74 arcmin | 1.61 arcsec      |
131 | 9          | 3145728         | 6.871 arcmin | 805.2 mas        |
132 | 10         | 12582912        | 3.435 arcmin | 402.6 mas        |
133 | 11         | 50331648        | 1.718 arcmin | 201.3 mas        |
134 | 12         | 201326592       | 51.53 arcsec | 100.6 mas        |
135 | 13         | 805306368       | 25.77 arcsec | 50.32 mas        |
136 +------------+-----------------+--------------+------------------+

137 """ 

138 hips_order = pexConfig.Field( 

139 doc="HIPS image order.", 

140 dtype=int, 

141 default=11, 

142 ) 

143 shift_order = pexConfig.Field( 

144 doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)", 

145 dtype=int, 

146 default=9, 

147 ) 

148 warp = pexConfig.ConfigField( 

149 dtype=afwMath.Warper.ConfigClass, 

150 doc="Warper configuration", 

151 ) 

152 

153 def setDefaults(self): 

154 self.warp.warpingKernelName = "lanczos5" 

155 

156 

157class HipsTaskNameDescriptor: 

158 """Descriptor used create a DefaultName that matches the order of 

159 the defined dimensions in the connections class. 

160 

161 Parameters 

162 ---------- 

163 prefix : `str` 

164 The prefix of the Default name, to which the order will be 

165 appended. 

166 """ 

167 def __init__(self, prefix): 

168 # create a defaultName template 

169 self._defaultName = f"{prefix}{{}}" 

170 self._order = None 

171 

172 def __get__(self, obj, klass=None): 

173 if klass is None: 

174 raise RuntimeError( 

175 "HipsTaskDescriptor was used in an unexpected context" 

176 ) 

177 if self._order is None: 

178 klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions 

179 for dim in klassDimensions: 

180 if (match := re.match(r"^healpix(\d*)$", dim)) is not None: 

181 self._order = int(match.group(1)) 

182 break 

183 else: 

184 raise RuntimeError( 

185 "Could not find healpix dimension in connections class" 

186 ) 

187 return self._defaultName.format(self._order) 

188 

189 

190class HighResolutionHipsTask(pipeBase.PipelineTask): 

191 """Task for making high resolution HiPS images.""" 

192 ConfigClass = HighResolutionHipsConfig 

193 _DefaultName = HipsTaskNameDescriptor("highResolutionHips") 
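# With the connections dimensions ("healpix9", "band") defined above, this
# descriptor resolves the default task name at class-access time, e.g.:
#
#     HighResolutionHipsTask._DefaultName  # -> "highResolutionHips9"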

194 

195 def __init__(self, **kwargs): 

196 super().__init__(**kwargs) 

197 self.warper = afwMath.Warper.fromConfig(self.config.warp) 

198 

199 @timeMethod 

200 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

201 inputs = butlerQC.get(inputRefs) 

202 

203 healpix_dim = f"healpix{self.config.hips_order}" 

204 

205 pixels = [hips_exposure.dataId[healpix_dim] 

206 for hips_exposure in outputRefs.hips_exposures] 

207 

208 outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"]) 

209 

210 hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]: 

211 hips_exposure_ref for hips_exposure_ref in outputRefs.hips_exposures} 

212 for pixel, hips_exposure in outputs.hips_exposures.items(): 

213 butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel]) 

214 

215 def run(self, pixels, coadd_exposure_handles): 

216 """Run the HighResolutionHipsTask. 

217 

218 Parameters 

219 ---------- 

220 pixels : `Iterable` [ `int` ] 

221 Iterable of healpix pixels (nest ordering) to warp to. 

222 coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`] 

223 Handles for the coadd exposures. 

224 

225 Returns 

226 ------- 

227 outputs : `lsst.pipe.base.Struct` 

228 ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value) 

229 """ 

230 self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order) 

231 

232 npix = 2**self.config.shift_order 

233 bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0), 

234 dimensions=geom.Extent2I(npix, npix)) 

235 

236 # For each healpix pixel we will create an empty exposure with the 

237 # correct HPX WCS. We furthermore create a dict to hold each of 

238 # the warps that will go into each HPX exposure. 

239 exp_hpx_dict = {} 

240 warp_dict = {} 

241 for pixel in pixels: 

242 wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order) 

243 exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx) 

244 exp_hpx_dict[pixel] = exp_hpx 

245 warp_dict[pixel] = [] 

246 

247 first_handle = True 

248 # Loop over input coadd exposures to minimize i/o (this speeds things 

249 # up by ~8x by batching together pixels that overlap a given coadd).

250 for handle in coadd_exposure_handles: 

251 coadd_exp = handle.get() 

252 

253 # For each pixel, warp the coadd to the HPX WCS for the pixel. 

254 for pixel in pixels: 

255 warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx) 

256 

257 exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs()) 

258 exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan) 

259 

260 if first_handle: 

261 # Make sure the mask planes, filter, and photocalib of the output 

262 # exposure match the (first) input exposure. 

263 exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict()) 

264 exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter()) 

265 exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib()) 

266 

267 if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)): 

268 # There is no overlap, skip. 

269 self.log.debug( 

270 "No overlap between output HPX %d and input exposure %s", 

271 pixel, 

272 handle.dataId 

273 ) 

274 continue 

275 

276 exp.maskedImage.assign(warped.maskedImage, warped.getBBox()) 

277 warp_dict[pixel].append(exp.maskedImage) 

278 

279 first_handle = False 

280 

281 stats_flags = afwMath.stringToStatisticsProperty("MEAN") 

282 stats_ctrl = afwMath.StatisticsControl() 

283 stats_ctrl.setNanSafe(True) 

284 stats_ctrl.setWeighted(True) 

285 stats_ctrl.setCalcErrorFromInputVariance(True) 

286 

287 # Loop over pixels and combine the warps for each pixel. 

288 # The combination is done with a simple mean for pixels that 

289 # overlap in neighboring patches. 

290 for pixel in pixels: 

291 exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan) 

292 

293 if not warp_dict[pixel]: 

294 # Nothing in this pixel 

295 self.log.debug("No data in HPX pixel %d", pixel) 

296 # Remove the pixel from the output, no need to persist an 

297 # empty exposure. 

298 exp_hpx_dict.pop(pixel) 

299 continue 

300 

301 exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack( 

302 warp_dict[pixel], 

303 stats_flags, 

304 stats_ctrl, 

305 [1.0]*len(warp_dict[pixel]), 

306 clipped=0, 

307 maskMap=[] 

308 ) 

309 

310 return pipeBase.Struct(hips_exposures=exp_hpx_dict) 

311 

312 @classmethod 

313 def build_quantum_graph_cli(cls, argv): 

314 """A command-line interface entry point to `build_quantum_graph`. 

315 This method provides the implementation for the 

316 ``build-high-resolution-hips-qg`` script. 

317 

318 Parameters 

319 ---------- 

320 argv : `Sequence` [ `str` ] 

321 Command-line arguments (e.g. ``sys.argv[1:]``). 

322 """ 

323 parser = cls._make_cli_parser() 

324 

325 args = parser.parse_args(argv) 

326 

327 if args.subparser_name is None: 

328 parser.print_help() 

329 sys.exit(1) 

330 

331 pipeline = pipeBase.Pipeline.from_uri(args.pipeline) 

332 expanded_pipeline = list(pipeline.toExpandedPipeline()) 

333 

334 if len(expanded_pipeline) != 1: 

335 raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.") 

336 

337 (task_def,) = expanded_pipeline 

338 

339 butler = Butler(args.butler_config, collections=args.input) 

340 

341 if args.subparser_name == "segment": 

342 # Do the segmentation 

343 hpix_pixelization = HealpixPixelization(level=args.hpix_build_order) 

344 dataset = task_def.connections.coadd_exposure_handles.name 

345 data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded()) 

346 region_pixels = [] 

347 for data_id in data_ids: 

348 region = data_id.region 

349 pixel_range = hpix_pixelization.envelope(region) 

350 for r in pixel_range.ranges(): 

351 region_pixels.extend(range(r[0], r[1])) 

352 indices = np.unique(region_pixels) 

353 

354 print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:") 

355 for pixel in indices: 

356 print(pixel) 

357 

358 elif args.subparser_name == "build": 

359 # Build the quantum graph. 

360 

361 # Figure out collection names. 

362 if args.output_run is None: 

363 if args.output is None: 

364 raise ValueError("At least one of --output or --output-run options is required.") 

365 args.output_run = "{}/{}".format(args.output, pipeBase.Instrument.makeCollectionTimestamp()) 

366 

367 build_ranges = RangeSet(sorted(args.pixels)) 

368 

369 # Metadata includes a subset of attributes defined in CmdLineFwk. 

370 metadata = { 

371 "input": args.input, 

372 "butler_argument": args.butler_config, 

373 "output": args.output, 

374 "output_run": args.output_run, 

375 "data_query": args.where, 

376 "time": f"{datetime.now()}", 

377 } 

378 

379 qg = cls.build_quantum_graph( 

380 task_def, 

381 butler.registry, 

382 args.hpix_build_order, 

383 build_ranges, 

384 where=args.where, 

385 collections=args.input, 

386 metadata=metadata, 

387 ) 

388 qg.saveUri(args.save_qgraph) 

389 

390 @classmethod 

391 def _make_cli_parser(cls): 

392 """Make the command-line parser. 

393 

394 Returns 

395 ------- 

396 parser : `argparse.ArgumentParser` 

397 """ 

398 parser = argparse.ArgumentParser( 

399 description=( 

400 "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets." 

401 ), 

402 ) 

403 subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name") 

404 

405 parser_segment = subparsers.add_parser("segment", 

406 help="Determine survey segments for workflow.") 

407 parser_build = subparsers.add_parser("build", 

408 help="Build quantum graph for HighResolutionHipsTask") 

409 

410 for sub in [parser_segment, parser_build]: 

411 # These arguments are in common. 

412 sub.add_argument( 

413 "-b", 

414 "--butler-config", 

415 type=str, 

416 help="Path to data repository or butler configuration.", 

417 required=True, 

418 ) 

419 sub.add_argument( 

420 "-p", 

421 "--pipeline", 

422 type=str, 

423 help="Pipeline file, limited to one task.", 

424 required=True, 

425 ) 

426 sub.add_argument( 

427 "-i", 

428 "--input", 

429 type=str, 

430 nargs="+", 

431 help="Input collection(s) to search for coadd exposures.", 

432 required=True, 

433 ) 

434 sub.add_argument( 

435 "-o", 

436 "--hpix_build_order", 

437 type=int, 

438 default=1, 

439 help="HEALPix order to segment sky for building quantum graph files.", 

440 ) 

441 sub.add_argument( 

442 "-w", 

443 "--where", 

444 type=str, 

445 default=None, 

446 help="Data ID expression used when querying for input coadd datasets.", 

447 ) 

448 

449 parser_build.add_argument( 

450 "--output", 

451 type=str, 

452 help=( 

453 "Name of the output CHAINED collection. If this options is specified and " 

454 "--output-run is not, then a new RUN collection will be created by appending " 

455 "a timestamp to the value of this option." 

456 ), 

457 default=None, 

458 metavar="COLL", 

459 ) 

460 parser_build.add_argument( 

461 "--output-run", 

462 type=str, 

463 help=( 

464 "Output RUN collection to write resulting images. If not provided " 

465 "then --output must be provided and a new RUN collection will be created " 

466 "by appending a timestamp to the value passed with --output." 

467 ), 

468 default=None, 

469 metavar="RUN", 

470 ) 

471 parser_build.add_argument( 

472 "-q", 

473 "--save-qgraph", 

474 type=str, 

475 help="Output filename for QuantumGraph.", 

476 required=True, 

477 ) 

478 parser_build.add_argument( 

479 "-P", 

480 "--pixels", 

481 type=int, 

482 nargs="+", 

483 help="Pixels at --hpix_build_order to generate quantum graph.", 

484 required=True, 

485 ) 

486 

487 return parser 

488 

489 @classmethod 

490 def build_quantum_graph( 

491 cls, 

492 task_def, 

493 registry, 

494 constraint_order, 

495 constraint_ranges, 

496 where=None, 

497 collections=None, 

498 metadata=None, 

499 ): 

500 """Generate a `QuantumGraph` for running just this task. 

501 

502 This is a temporary workaround for incomplete butler query support for 

503 HEALPix dimensions. 

504 

505 Parameters 

506 ---------- 

507 task_def : `lsst.pipe.base.TaskDef` 

508 Task definition. 

509 registry : `lsst.daf.butler.Registry` 

510 Client for the butler database. May be read-only. 

511 constraint_order : `int` 

512 HEALPix order used to constrain which quanta are generated, via

513 ``constraint_indices``. This should be a coarser grid (smaller 

514 order) than the order used for the task's quantum and output data 

515 IDs, and ideally something between the spatial scale of a patch and

516 the data repository's "common skypix" system (usually ``htm7``). 

517 constraint_ranges : `lsst.sphgeom.RangeSet` 

518 RangeSet which describes constraint pixels (HEALPix NEST, with order 

519 constraint_order) to constrain generated quanta. 

520 where : `str`, optional 

521 A boolean `str` expression of the form accepted by 

522 `Registry.queryDatasets` to constrain input datasets. This may 

523 contain a constraint on tracts, patches, or bands, but not HEALPix 

524 indices. Constraints on tracts and patches should usually be 

525 unnecessary, however - existing coadds that overlap the given 

526 HEALPix indices will be selected without such a constraint, and

527 providing one may reject some that should normally be included. 

528 collections : `str` or `Iterable` [ `str` ], optional 

529 Collection or collections to search for input datasets, in order. 

530 If not provided, ``registry.defaults.collections`` will be 

531 searched. 

532 metadata : `dict` [ `str`, `Any` ] 

533 Graph metadata. It is required to contain the "output_run" key with the

534 name of the output RUN collection. 

535 """ 

536 config = task_def.config 

537 

538 dataset_types = pipeBase.PipelineDatasetTypes.fromPipeline(pipeline=[task_def], registry=registry) 

539 # Since we know this is the only task in the pipeline, we know there 

540 # is only one overall input and one overall output. 

541 (input_dataset_type,) = dataset_types.inputs 

542 

543 # Extract the main output dataset type (which needs multiple 

544 # DatasetRefs, and tells us the output HPX level), and make a set of 

545 # what remains for more mechanical handling later. 

546 output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name] 

547 incidental_output_dataset_types = dataset_types.outputs.copy() 

548 incidental_output_dataset_types.remove(output_dataset_type) 

549 (hpx_output_dimension,) = (d for d in output_dataset_type.dimensions 

550 if isinstance(d, SkyPixDimension)) 

551 

552 constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization 

553 common_skypix_name = registry.dimensions.commonSkyPix.name 

554 common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization 

555 

556 # We will need all the pixels at the quantum resolution as well 

557 task_dimensions = registry.dimensions.extract(task_def.connections.dimensions) 

558 (hpx_dimension,) = (d for d in task_dimensions if d.name != "band") 

559 hpx_pixelization = hpx_dimension.pixelization 

560 

561 if hpx_pixelization.level < constraint_order: 

562 raise ValueError(f"Quantum order {hpx_pixelization.level} must be < {constraint_order}") 

563 hpx_ranges = constraint_ranges.scaled(4**(hpx_pixelization.level - constraint_order)) 

564 

565 # We can be generous in looking for pixels here, because we constrain by actual

566 # patch regions below. 

567 common_skypix_ranges = RangeSet() 

568 for begin, end in constraint_ranges: 

569 for hpx_index in range(begin, end): 

570 constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index) 

571 common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region) 

572 

573 # To keep the query from getting out of hand (and breaking) we simplify until we have fewer 

574 # than 100 ranges, which seems to work fine.

575 for simp in range(1, 10): 

576 if len(common_skypix_ranges) < 100: 

577 break 

578 common_skypix_ranges.simplify(simp) 

579 

580 # Use that RangeSet to assemble a WHERE constraint expression. This 

581 # could definitely get too big if the "constraint healpix" order is too 

582 # fine. 
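# For example, with common skypix "htm7" and two surviving ranges this
# produces a constraint like
#     "(htm7 >= cpx0a AND htm7 <= cpx0b) OR htm7 = cpx1"
# with the corresponding pixel values supplied through ``bind``.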

583 where_terms = [] 

584 bind = {} 

585 for n, (begin, end) in enumerate(common_skypix_ranges): 

586 stop = end - 1 # registry range syntax is inclusive 

587 if begin == stop: 

588 where_terms.append(f"{common_skypix_name} = cpx{n}") 

589 bind[f"cpx{n}"] = begin 

590 else: 

591 where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)") 

592 bind[f"cpx{n}a"] = begin 

593 bind[f"cpx{n}b"] = stop 

594 if where is None: 

595 where = " OR ".join(where_terms) 

596 else: 

597 where = f"({where}) AND ({' OR '.join(where_terms)})" 

598 # Query for input datasets with this constraint, and ask for expanded 

599 # data IDs because we want regions. Immediately group this by patch so 

600 # we don't do later geometric stuff n_bands more times than we need to. 

601 input_refs = registry.queryDatasets( 

602 input_dataset_type, 

603 where=where, 

604 findFirst=True, 

605 collections=collections, 

606 bind=bind 

607 ).expanded() 

608 inputs_by_patch = defaultdict(set) 

609 patch_dimensions = registry.dimensions.extract(["patch"]) 

610 for input_ref in input_refs: 

611 inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref) 

612 if not inputs_by_patch: 

613 message_body = "\n".join(input_refs.explain_no_results()) 

614 raise RuntimeError(f"No inputs found:\n{message_body}") 

615 

616 # Iterate over patches and compute the set of output healpix pixels 

617 # that overlap each one. Use that to associate inputs with output 

618 # pixels, but only for the output pixels we've already identified. 

619 inputs_by_hpx = defaultdict(set) 

620 for patch_data_id, input_refs_for_patch in inputs_by_patch.items(): 

621 patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region) 

622 for begin, end in patch_hpx_ranges & hpx_ranges: 

623 for hpx_index in range(begin, end): 

624 inputs_by_hpx[hpx_index].update(input_refs_for_patch) 

625 # Iterate over the dict we just created and create the actual quanta. 

626 quanta = [] 

627 for hpx_index, input_refs_for_hpx_index in inputs_by_hpx.items(): 

628 # Group inputs by band. 

629 input_refs_by_band = defaultdict(list) 

630 for input_ref in input_refs_for_hpx_index: 

631 input_refs_by_band[input_ref.dataId["band"]].append(input_ref) 

632 # Iterate over bands to make quanta. 

633 for band, input_refs_for_band in input_refs_by_band.items(): 

634 data_id = registry.expandDataId({hpx_dimension: hpx_index, "band": band}) 

635 

636 hpx_pixel_ranges = RangeSet(hpx_index) 

637 hpx_output_ranges = hpx_pixel_ranges.scaled(4**(config.hips_order - hpx_pixelization.level)) 

638 output_data_ids = [] 

639 for begin, end in hpx_output_ranges: 

640 for hpx_output_index in range(begin, end): 

641 output_data_ids.append( 

642 registry.expandDataId({hpx_output_dimension: hpx_output_index, "band": band}) 

643 ) 

644 output_run = metadata["output_run"] 

645 outputs = { 

646 dt: [DatasetRef(dt, data_id, run=output_run)] for dt in incidental_output_dataset_types 

647 } 

648 outputs[output_dataset_type] = [DatasetRef(output_dataset_type, data_id, run=output_run) 

649 for data_id in output_data_ids] 

650 quanta.append( 

651 Quantum( 

652 taskName=task_def.taskName, 

653 taskClass=task_def.taskClass, 

654 dataId=data_id, 

655 initInputs={}, 

656 inputs={input_dataset_type: input_refs_for_band}, 

657 outputs=outputs, 

658 ) 

659 ) 

660 

661 if len(quanta) == 0: 

662 raise RuntimeError("Given constraints yielded empty quantum graph.") 

663 

664 return pipeBase.QuantumGraph(quanta={task_def: quanta}, metadata=metadata) 

665 

666 

667class HipsPropertiesSpectralTerm(pexConfig.Config): 

668 lambda_min = pexConfig.Field( 

669 doc="Minimum wavelength (nm)", 

670 dtype=float, 

671 ) 

672 lambda_max = pexConfig.Field( 

673 doc="Maximum wavelength (nm)", 

674 dtype=float, 

675 ) 

676 

677 

678class HipsPropertiesConfig(pexConfig.Config): 

679 """Configuration parameters for writing a HiPS properties file.""" 

680 creator_did_template = pexConfig.Field( 

681 doc=("Unique identifier of the HiPS - Format: IVOID. " 

682 "Use ``{band}`` to substitute the band name."), 

683 dtype=str, 

684 optional=False, 

685 ) 

686 obs_collection = pexConfig.Field( 

687 doc="Short name of original data set - Format: one word", 

688 dtype=str, 

689 optional=True, 

690 ) 

691 obs_description_template = pexConfig.Field( 

692 doc=("Data set description - Format: free text, longer free text " 

693 "description of the dataset. Use ``{band}`` to substitute " 

694 "the band name."), 

695 dtype=str, 

696 ) 

697 prov_progenitor = pexConfig.ListField( 

698 doc="Provenance of the original data - Format: free text", 

699 dtype=str, 

700 default=[], 

701 ) 

702 obs_title_template = pexConfig.Field( 

703 doc=("Data set title format: free text, but should be short. " 

704 "Use ``{band}`` to substitute the band name."), 

705 dtype=str, 

706 optional=False, 

707 ) 

708 spectral_ranges = pexConfig.ConfigDictField( 

709 doc=("Mapping from band to lambda_min, lamba_max (nm). May be approximate."), 

710 keytype=str, 

711 itemtype=HipsPropertiesSpectralTerm, 

712 default={}, 

713 ) 

714 initial_ra = pexConfig.Field( 

715 doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.", 

716 dtype=float, 

717 optional=True, 

718 ) 

719 initial_dec = pexConfig.Field( 

720 doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.", 

721 dtype=float, 

722 optional=True, 

723 ) 

724 initial_fov = pexConfig.Field( 

725 doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.", 

726 dtype=float, 

727 optional=True, 

728 ) 

729 obs_ack = pexConfig.Field( 

730 doc="Observation acknowledgements (free text).", 

731 dtype=str, 

732 optional=True, 

733 ) 

734 t_min = pexConfig.Field( 

735 doc="Time (MJD) of earliest observation included in HiPS", 

736 dtype=float, 

737 optional=True, 

738 ) 

739 t_max = pexConfig.Field( 

740 doc="Time (MJD) of latest observation included in HiPS", 

741 dtype=float, 

742 optional=True, 

743 ) 

744 

745 def validate(self): 

746 super().validate() 

747 

748 if self.obs_collection is not None: 

749 if re.search(r"\s", self.obs_collection): 

750 raise ValueError("obs_collection cannot contain any space characters.") 

751 

752 def setDefaults(self): 

753 # Values here taken from 

754 # https://github.com/lsst-dm/dax_obscore/blob/44ac15029136e2ec15/configs/dp02.yaml#L46 

755 u_term = HipsPropertiesSpectralTerm() 

756 u_term.lambda_min = 330. 

757 u_term.lambda_max = 400. 

758 self.spectral_ranges["u"] = u_term 

759 g_term = HipsPropertiesSpectralTerm() 

760 g_term.lambda_min = 402. 

761 g_term.lambda_max = 552. 

762 self.spectral_ranges["g"] = g_term 

763 r_term = HipsPropertiesSpectralTerm() 

764 r_term.lambda_min = 552. 

765 r_term.lambda_max = 691. 

766 self.spectral_ranges["r"] = r_term 

767 i_term = HipsPropertiesSpectralTerm() 

768 i_term.lambda_min = 691. 

769 i_term.lambda_max = 818. 

770 self.spectral_ranges["i"] = i_term 

771 z_term = HipsPropertiesSpectralTerm() 

772 z_term.lambda_min = 818. 

773 z_term.lambda_max = 922. 

774 self.spectral_ranges["z"] = z_term 

775 y_term = HipsPropertiesSpectralTerm() 

776 y_term.lambda_min = 970. 

777 y_term.lambda_max = 1060. 

778 self.spectral_ranges["y"] = y_term 

779 

780 

781class GenerateHipsConnections(pipeBase.PipelineTaskConnections, 

782 dimensions=("instrument", "band"), 

783 defaultTemplates={"coaddName": "deep"}): 

784 hips_exposure_handles = pipeBase.connectionTypes.Input( 

785 doc="HiPS-compatible HPX images.", 

786 name="{coaddName}Coadd_hpx", 

787 storageClass="ExposureF", 

788 dimensions=("healpix11", "band"), 

789 multiple=True, 

790 deferLoad=True, 

791 ) 

792 

793 

794class GenerateHipsConfig(pipeBase.PipelineTaskConfig, 

795 pipelineConnections=GenerateHipsConnections): 

796 """Configuration parameters for GenerateHipsTask.""" 

797 # WARNING: In general PipelineTasks are not allowed to do any outputs 

798 # outside of the butler. This task has been given (temporary) 

799 # Special Dispensation because of the nature of HiPS outputs until 

800 # a more controlled solution can be found. 

801 hips_base_uri = pexConfig.Field( 

802 doc="URI to HiPS base for output.", 

803 dtype=str, 

804 optional=False, 

805 ) 

806 min_order = pexConfig.Field( 

807 doc="Minimum healpix order for HiPS tree.", 

808 dtype=int, 

809 default=3, 

810 ) 

811 properties = pexConfig.ConfigField( 

812 dtype=HipsPropertiesConfig, 

813 doc="Configuration for properties file.", 

814 ) 

815 allsky_tilesize = pexConfig.Field( 

816 dtype=int, 

817 doc="Allsky.png tile size. Must be power of 2.", 

818 default=512, 

819 ) 

820 png_gray_asinh_minimum = pexConfig.Field( 

821 doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.", 

822 dtype=float, 

823 default=0.0, 

824 ) 

825 png_gray_asinh_stretch = pexConfig.Field( 

826 doc="AsinhMapping linear stretch for grayscale png scaling.", 

827 dtype=float, 

828 default=2.0, 

829 ) 

830 png_gray_asinh_softening = pexConfig.Field( 

831 doc="AsinhMapping softening parameter (Q) for grayscale png scaling.", 

832 dtype=float, 

833 default=8.0, 

834 ) 

835 

836 

837class GenerateHipsTask(pipeBase.PipelineTask): 

838 """Task for making a HiPS tree with FITS and grayscale PNGs.""" 

839 ConfigClass = GenerateHipsConfig 

840 _DefaultName = "generateHips" 

841 color_task = False 

842 

843 @timeMethod 

844 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

845 inputs = butlerQC.get(inputRefs) 

846 

847 dims = inputRefs.hips_exposure_handles[0].dataId.names 

848 order = None 

849 for dim in dims: 

850 if "healpix" in dim: 

851 order = int(dim.split("healpix")[1]) 

852 healpix_dim = dim 

853 break 

854 else: 

855 raise RuntimeError("Could not determine healpix order for input exposures.") 

856 

857 hips_exposure_handle_dict = { 

858 (hips_exposure_handle.dataId[healpix_dim], 

859 hips_exposure_handle.dataId["band"]): hips_exposure_handle 

860 for hips_exposure_handle in inputs["hips_exposure_handles"] 

861 } 

862 

863 data_bands = {hips_exposure_handle.dataId["band"] 

864 for hips_exposure_handle in inputs["hips_exposure_handles"]} 

865 bands = self._check_data_bands(data_bands) 

866 

867 self.run( 

868 bands=bands, 

869 max_order=order, 

870 hips_exposure_handle_dict=hips_exposure_handle_dict, 

871 do_color=self.color_task, 

872 ) 

873 

874 def _check_data_bands(self, data_bands): 

875 """Check that the data has only a single band. 

876 

877 Parameters 

878 ---------- 

879 data_bands : `set` [`str`] 

880 Bands from the input data. 

881 

882 Returns 

883 ------- 

884 bands : `list` [`str`] 

885 List of single band to process. 

886 

887 Raises 

888 ------ 

889 RuntimeError if there is not exactly one band. 

890 """ 

891 if len(data_bands) != 1: 

892 raise RuntimeError("GenerateHipsTask can only use data from a single band.") 

893 

894 return list(data_bands) 

895 

896 @timeMethod 

897 def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False): 

898 """Run the GenerateHipsTask. 

899 

900 Parameters 

901 ---------- 

902 bands : `list` [ `str` ]

903 List of bands to be processed (or single band). 

904 max_order : `int` 

905 HEALPix order of the maximum (native) HPX exposures. 

906 hips_exposure_handle_dict : `dict` [`tuple` [`int`, `str`], `lsst.daf.butler.DeferredDatasetHandle`]

907 Dict of handles for the HiPS high-resolution exposures. 

908 Key is (pixel number, ``band``). 

909 do_color : `bool`, optional 

910 Do color pngs instead of per-band grayscale. 

911 """ 

912 min_order = self.config.min_order 

913 

914 if not do_color: 

915 png_grayscale_mapping = AsinhMapping( 

916 self.config.png_gray_asinh_minimum, 

917 self.config.png_gray_asinh_stretch, 

918 Q=self.config.png_gray_asinh_softening, 

919 ) 

920 else: 

921 png_color_mapping = AsinhMapping( 

922 self.config.png_color_asinh_minimum, 

923 self.config.png_color_asinh_stretch, 

924 Q=self.config.png_color_asinh_softening, 

925 ) 

926 

927 bcb = self.config.blue_channel_band 

928 gcb = self.config.green_channel_band 

929 rcb = self.config.red_channel_band 

930 colorstr = f"{bcb}{gcb}{rcb}" 

931 

932 # The base path is based on the hips_base_uri. 

933 hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True) 

934 

935 # We need to unique-ify the pixels because they show up for multiple bands. 

936 # The output of this is a sorted array. 

937 pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()])) 

938 

939 # Add a "gutter" pixel at the end. Start with 0 which maps to 0 always. 

940 pixels = np.append(pixels, [0]) 

941 

942 # Convert the pixels to each order that will be generated. 

943 pixels_shifted = {} 

944 pixels_shifted[max_order] = pixels 

945 for order in range(max_order - 1, min_order - 1, -1): 

946 pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2) 
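# For example, a pixel 1000 at max_order maps to parent pixel 1000 >> 2 = 250
# at max_order - 1, then 250 >> 2 = 62 at max_order - 2, and so on down to
# min_order; four sibling pixels share a single parent at the next coarser
# order.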

947 

948 # And set the gutter to an illegal pixel value. 

949 for order in range(min_order, max_order + 1): 

950 pixels_shifted[order][-1] = -1 

951 

952 # Read in the first pixel for determining image properties. 

953 exp0 = list(hips_exposure_handle_dict.values())[0].get() 

954 bbox = exp0.getBBox() 

955 npix = bbox.getWidth() 

956 shift_order = int(np.round(np.log2(npix))) 

957 

958 # Create blank exposures for each level, including the highest order. 

959 # We also make sure we create blank exposures for any bands used in the color 

960 # PNGs, even if they aren't available. 

961 exposures = {} 

962 for band in bands: 

963 for order in range(min_order, max_order + 1): 

964 exp = exp0.Factory(bbox=bbox) 

965 exp.image.array[:, :] = np.nan 

966 exposures[(band, order)] = exp 

967 

968 # Loop over all pixels, avoiding the gutter. 

969 for pixel_counter, pixel in enumerate(pixels[:-1]): 

970 self.log.debug("Working on high resolution pixel %d", pixel) 

971 for band in bands: 

972 # Read all the exposures here for the highest order. 

973 # There will always be at least one band with a HiPS image available 

974 # at the highest order. However, for color images it is possible that 

975 # not all bands have coverage so we require this check. 

976 if (pixel, band) in hips_exposure_handle_dict: 

977 exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get() 

978 

979 # Go up the HiPS tree. 

980 # We only write pixels and rebin to fill the parent pixel when we are 

981 # done with the current pixel, which is the case when the next pixel

982 # has a different pixel number at this order.

983 for order in range(max_order, min_order - 1, -1): 

984 if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]: 

985 # This order is not done, and so none of the other orders will be. 

986 break 

987 

988 # We can now write out the images for each band. 

989 # Note this will always trigger at the max order where each pixel is unique. 

990 if not do_color: 

991 for band in bands: 

992 self._write_hips_image( 

993 hips_base_path.join(f"band_{band}", forceDirectory=True), 

994 order, 

995 pixels_shifted[order][pixel_counter], 

996 exposures[(band, order)].image, 

997 png_grayscale_mapping, 

998 shift_order=shift_order, 

999 ) 

1000 else: 

1001 # Make a color png. 

1002 self._write_hips_color_png( 

1003 hips_base_path.join(f"color_{colorstr}", forceDirectory=True), 

1004 order, 

1005 pixels_shifted[order][pixel_counter], 

1006 exposures[(self.config.red_channel_band, order)].image, 

1007 exposures[(self.config.green_channel_band, order)].image, 

1008 exposures[(self.config.blue_channel_band, order)].image, 

1009 png_color_mapping, 

1010 ) 

1011 

1012 log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG 

1013 self.log.log( 

1014 log_level, 

1015 "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)", 

1016 ",".join(bands), 

1017 order, 

1018 pixels_shifted[order][pixel_counter], 

1019 pixel_counter, 

1020 len(pixels) - 1, 

1021 ) 

1022 

1023 # When we are at the top of the tree, erase top level images and continue. 

1024 if order == min_order: 

1025 for band in bands: 

1026 exposures[(band, order)].image.array[:, :] = np.nan 

1027 continue 

1028 

1029 # Now average the images for each band. 

1030 for band in bands: 

1031 arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2) 

1032 with warnings.catch_warnings(): 

1033 warnings.simplefilter("ignore") 

1034 binned_image_arr = np.nanmean(arr, axis=(1, 3)) 

1035 

1036 # Fill the next level up. We figure out which of the four 

1037 # sub-pixels the current pixel occupies. 

1038 sub_index = (pixels_shifted[order][pixel_counter] 

1039 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2)) 

1040 

1041 # Fill exposure at the next level up. 

1042 exp = exposures[(band, order - 1)] 

1043 

1044 # Fill the correct subregion. 

1045 if sub_index == 0: 

1046 exp.image.array[npix//2:, 0: npix//2] = binned_image_arr 

1047 elif sub_index == 1: 

1048 exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr 

1049 elif sub_index == 2: 

1050 exp.image.array[npix//2:, npix//2:] = binned_image_arr 

1051 elif sub_index == 3: 

1052 exp.image.array[0: npix//2, npix//2:] = binned_image_arr 

1053 else: 

1054 # This should be impossible. 

1055 raise ValueError("Illegal pixel sub index") 

1056 

1057 # Erase the previous exposure. 

1058 if order < max_order: 

1059 exposures[(band, order)].image.array[:, :] = np.nan 

1060 

1061 # Write the properties files and MOCs. 

1062 if not do_color: 

1063 for band in bands: 

1064 band_pixels = np.array([pixel 

1065 for pixel, band_ in hips_exposure_handle_dict.keys() 

1066 if band_ == band]) 

1067 band_pixels = np.sort(band_pixels) 

1068 

1069 self._write_properties_and_moc( 

1070 hips_base_path.join(f"band_{band}", forceDirectory=True), 

1071 max_order, 

1072 band_pixels, 

1073 exp0, 

1074 shift_order, 

1075 band, 

1076 False, 

1077 ) 

1078 self._write_allsky_file( 

1079 hips_base_path.join(f"band_{band}", forceDirectory=True), 

1080 min_order, 

1081 ) 

1082 else: 

1083 self._write_properties_and_moc( 

1084 hips_base_path.join(f"color_{colorstr}", forceDirectory=True), 

1085 max_order, 

1086 pixels[:-1], 

1087 exp0, 

1088 shift_order, 

1089 colorstr, 

1090 True, 

1091 ) 

1092 self._write_allsky_file( 

1093 hips_base_path.join(f"color_{colorstr}", forceDirectory=True), 

1094 min_order, 

1095 ) 

1096 

1097 def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9): 

1098 """Write a HiPS image. 

1099 

1100 Parameters 

1101 ---------- 

1102 hips_base_path : `lsst.resources.ResourcePath` 

1103 Resource path to the base of the HiPS directory tree. 

1104 order : `int` 

1105 HEALPix order of the HiPS image to write. 

1106 pixel : `int` 

1107 HEALPix pixel of the HiPS image. 

1108 image : `lsst.afw.image.Image` 

1109 Image to write. 

1110 png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping` 

1111 Mapping to convert image to scaled png. 

1112 shift_order : `int`, optional 

1113 HPX shift_order. 

1114 """ 

1115 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1116 # outside of the butler. This task has been given (temporary) 

1117 # Special Dispensation because of the nature of HiPS outputs until 

1118 # a more controlled solution can be found. 

1119 

1120 dir_number = self._get_dir_number(pixel) 

1121 hips_dir = hips_base_path.join( 

1122 f"Norder{order}", 

1123 forceDirectory=True 

1124 ).join( 

1125 f"Dir{dir_number}", 

1126 forceDirectory=True 

1127 ) 

1128 

1129 wcs = makeHpxWcs(order, pixel, shift_order=shift_order) 

1130 

1131 uri = hips_dir.join(f"Npix{pixel}.fits") 

1132 

1133 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1134 image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata()) 

1135 

1136 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1137 

1138 # And make a grayscale png as well 

1139 

1140 vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8) 

1141 vals[~np.isfinite(image.array) | (image.array < 0)] = 0 

1142 im = Image.fromarray(vals[::-1, :], "L") 

1143 

1144 uri = hips_dir.join(f"Npix{pixel}.png") 

1145 

1146 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1147 im.save(temporary_uri.ospath) 

1148 

1149 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1150 

1151 def _write_hips_color_png( 

1152 self, 

1153 hips_base_path, 

1154 order, 

1155 pixel, 

1156 image_red, 

1157 image_green, 

1158 image_blue, 

1159 png_mapping, 

1160 ): 

1161 """Write a color png HiPS image. 

1162 

1163 Parameters 

1164 ---------- 

1165 hips_base_path : `lsst.resources.ResourcePath` 

1166 Resource path to the base of the HiPS directory tree. 

1167 order : `int` 

1168 HEALPix order of the HiPS image to write. 

1169 pixel : `int` 

1170 HEALPix pixel of the HiPS image. 

1171 image_red : `lsst.afw.image.Image` 

1172 Input for red channel of output png. 

1173 image_green : `lsst.afw.image.Image` 

1174 Input for green channel of output png. 

1175 image_blue : `lsst.afw.image.Image` 

1176 Input for blue channel of output png. 

1177 png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping` 

1178 Mapping to convert image to scaled png. 

1179 """ 

1180 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1181 # outside of the butler. This task has been given (temporary) 

1182 # Special Dispensation because of the nature of HiPS outputs until 

1183 # a more controlled solution can be found. 

1184 

1185 dir_number = self._get_dir_number(pixel) 

1186 hips_dir = hips_base_path.join( 

1187 f"Norder{order}", 

1188 forceDirectory=True 

1189 ).join( 

1190 f"Dir{dir_number}", 

1191 forceDirectory=True 

1192 ) 

1193 

1194 # We need to convert nans to the minimum values in the mapping. 

1195 arr_red = image_red.array.copy() 

1196 arr_red[np.isnan(arr_red)] = png_mapping.minimum[0] 

1197 arr_green = image_green.array.copy() 

1198 arr_green[np.isnan(arr_green)] = png_mapping.minimum[1] 

1199 arr_blue = image_blue.array.copy() 

1200 arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2] 

1201 

1202 image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue) 

1203 

1204 im = Image.fromarray(image_array[::-1, :, :], mode="RGB") 

1205 

1206 uri = hips_dir.join(f"Npix{pixel}.png") 

1207 

1208 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1209 im.save(temporary_uri.ospath) 

1210 

1211 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1212 

1213 def _write_properties_and_moc( 

1214 self, 

1215 hips_base_path, 

1216 max_order, 

1217 pixels, 

1218 exposure, 

1219 shift_order, 

1220 band, 

1221 multiband 

1222 ): 

1223 """Write HiPS properties file and MOC. 

1224 

1225 Parameters 

1226 ---------- 

1227 hips_base_path : `lsst.resources.ResourcePath`

1228 Resource path to the base of the HiPS directory tree. 

1229 max_order : `int` 

1230 Maximum HEALPix order. 

1231 pixels : `np.ndarray` (N,) 

1232 Array of pixels used. 

1233 exposure : `lsst.afw.image.Exposure` 

1234 Sample HPX exposure used for generating HiPS tiles. 

1235 shift_order : `int` 

1236 HPX shift order. 

1237 band : `str` 

1238 Band (or color). 

1239 multiband : `bool` 

1240 Is band multiband / color? 

1241 """ 

1242 area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels) 

1243 

1244 initial_ra = self.config.properties.initial_ra 

1245 initial_dec = self.config.properties.initial_dec 

1246 initial_fov = self.config.properties.initial_fov 

1247 

1248 if initial_ra is None or initial_dec is None or initial_fov is None: 

1249 # We want to point to an arbitrary pixel in the footprint. 

1250 # Just take the median pixel value for simplicity. 

1251 temp_pixels = pixels.copy() 

1252 if temp_pixels.size % 2 == 0: 

1253 temp_pixels = np.append(temp_pixels, [temp_pixels[0]]) 

1254 medpix = int(np.median(temp_pixels)) 

1255 _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix) 

1256 _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60. 

1257 

1258 if initial_ra is None or initial_dec is None: 

1259 initial_ra = _initial_ra 

1260 initial_dec = _initial_dec 

1261 if initial_fov is None: 

1262 initial_fov = _initial_fov 

1263 

1264 self._write_hips_properties_file( 

1265 hips_base_path, 

1266 self.config.properties, 

1267 band, 

1268 multiband, 

1269 exposure, 

1270 max_order, 

1271 shift_order, 

1272 area, 

1273 initial_ra, 

1274 initial_dec, 

1275 initial_fov, 

1276 ) 

1277 

1278 # Write the MOC coverage 

1279 self._write_hips_moc_file( 

1280 hips_base_path, 

1281 max_order, 

1282 pixels, 

1283 ) 

1284 

1285 def _write_hips_properties_file( 

1286 self, 

1287 hips_base_path, 

1288 properties_config, 

1289 band, 

1290 multiband, 

1291 exposure, 

1292 max_order, 

1293 shift_order, 

1294 area, 

1295 initial_ra, 

1296 initial_dec, 

1297 initial_fov 

1298 ): 

1299 """Write HiPS properties file. 

1300 

1301 Parameters 

1302 ---------- 

1303 hips_base_path : `lsst.resources.ResourcePath` 

1304 ResourcePath at top of HiPS tree. File will be written 

1305 to this path as ``properties``. 

1306 properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig` 

1307 Configuration for properties values. 

1308 band : `str` 

1309 Name of band(s) for HiPS tree. 

1310 multiband : `bool` 

1311 Is multiband / color? 

1312 exposure : `lsst.afw.image.Exposure` 

1313 Sample HPX exposure used for generating HiPS tiles. 

1314 max_order : `int` 

1315 Maximum HEALPix order. 

1316 shift_order : `int` 

1317 HPX shift order. 

1318 area : `float` 

1319 Coverage area in square degrees. 

1320 initial_ra : `float` 

1321 Initial HiPS RA position (degrees). 

1322 initial_dec : `float` 

1323 Initial HiPS Dec position (degrees). 

1324 initial_fov : `float` 

1325 Initial HiPS display size (degrees). 

1326 """ 

1327 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1328 # outside of the butler. This task has been given (temporary) 

1329 # Special Dispensation because of the nature of HiPS outputs until 

1330 # a more controlled solution can be found. 

1331 def _write_property(fh, name, value): 

1332 """Write a property name/value to a file handle. 

1333 

1334 Parameters 

1335 ---------- 

1336 fh : file object

1337 Open for writing. 

1338 name : `str` 

1339 Name of property 

1340 value : `str` 

1341 Value of property 

1342 """ 

1343 # This ensures that the name has no spaces or space-like characters, 

1344 # per the HiPS standard. 

1345 if re.search(r"\s", name): 

1346 raise ValueError(f"``{name}`` cannot contain any space characters.") 

1347 if "=" in name: 

1348 raise ValueError(f"``{name}`` cannot contain an ``=``") 

1349 

1350 fh.write(f"{name:25}= {value}\n") 

1351 

1352 if exposure.image.array.dtype == np.dtype("float32"): 

1353 bitpix = -32 

1354 elif exposure.image.array.dtype == np.dtype("float64"): 

1355 bitpix = -64 

1356 elif exposure.image.array.dtype == np.dtype("int32"): 

1357 bitpix = 32 

1358 

1359 date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z" 

1360 pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees') 

1361 

1362 uri = hips_base_path.join("properties") 

1363 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1364 with open(temporary_uri.ospath, "w") as fh: 

1365 _write_property( 

1366 fh, 

1367 "creator_did", 

1368 properties_config.creator_did_template.format(band=band), 

1369 ) 

1370 if properties_config.obs_collection is not None: 

1371 _write_property(fh, "obs_collection", properties_config.obs_collection) 

1372 _write_property( 

1373 fh, 

1374 "obs_title", 

1375 properties_config.obs_title_template.format(band=band), 

1376 ) 

1377 if properties_config.obs_description_template is not None: 

1378 _write_property( 

1379 fh, 

1380 "obs_description", 

1381 properties_config.obs_description_template.format(band=band), 

1382 ) 

1383 if len(properties_config.prov_progenitor) > 0: 

1384 for prov_progenitor in properties_config.prov_progenitor: 

1385 _write_property(fh, "prov_progenitor", prov_progenitor) 

1386 if properties_config.obs_ack is not None: 

1387 _write_property(fh, "obs_ack", properties_config.obs_ack) 

1388 _write_property(fh, "obs_regime", "Optical") 

1389 _write_property(fh, "data_pixel_bitpix", str(bitpix)) 

1390 _write_property(fh, "dataproduct_type", "image") 

1391 _write_property(fh, "moc_sky_fraction", str(area/41253.)) 

1392 _write_property(fh, "data_ucd", "phot.flux") 

1393 _write_property(fh, "hips_creation_date", date_iso8601) 

1394 _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask") 

1395 _write_property(fh, "hips_creator", "Vera C. Rubin Observatory") 

1396 _write_property(fh, "hips_version", "1.4") 

1397 _write_property(fh, "hips_release_date", date_iso8601) 

1398 _write_property(fh, "hips_frame", "equatorial") 

1399 _write_property(fh, "hips_order", str(max_order)) 

1400 _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth())) 

1401 _write_property(fh, "hips_status", "private master clonableOnce") 

1402 if multiband: 

1403 _write_property(fh, "hips_tile_format", "png") 

1404 _write_property(fh, "dataproduct_subtype", "color") 

1405 else: 

1406 _write_property(fh, "hips_tile_format", "png fits") 

1407 _write_property(fh, "hips_pixel_bitpix", str(bitpix)) 

1408 _write_property(fh, "hips_pixel_scale", str(pixel_scale)) 

1409 _write_property(fh, "hips_initial_ra", str(initial_ra)) 

1410 _write_property(fh, "hips_initial_dec", str(initial_dec)) 

1411 _write_property(fh, "hips_initial_fov", str(initial_fov)) 

1412 if multiband: 

1413 if self.config.blue_channel_band in properties_config.spectral_ranges: 

1414 em_min = properties_config.spectral_ranges[ 

1415 self.config.blue_channel_band 

1416 ].lambda_min/1e9 

1417 else: 

1418 self.log.warning("blue band %s not in self.config.spectral_ranges.", band) 

1419 em_min = 3e-7 

1420 if self.config.red_channel_band in properties_config.spectral_ranges: 

1421 em_max = properties_config.spectral_ranges[ 

1422 self.config.red_channel_band 

1423 ].lambda_max/1e9 

1424 else: 

1425 self.log.warning("red band %s not in self.config.spectral_ranges.", band) 

1426 em_max = 1e-6 

1427 else: 

1428 if band in properties_config.spectral_ranges: 

1429 em_min = properties_config.spectral_ranges[band].lambda_min/1e9 

1430 em_max = properties_config.spectral_ranges[band].lambda_max/1e9 

1431 else: 

1432 self.log.warning("band %s not in self.config.spectral_ranges.", band) 

1433 em_min = 3e-7 

1434 em_max = 1e-6 

1435 _write_property(fh, "em_min", str(em_min)) 

1436 _write_property(fh, "em_max", str(em_max)) 

1437 if properties_config.t_min is not None: 

1438 _write_property(fh, "t_min", properties_config.t_min) 

1439 if properties_config.t_max is not None: 

1440 _write_property(fh, "t_max", properties_config.t_max) 

1441 

1442 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1443 

1444 def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1): 

1445 """Write HiPS MOC file. 

1446 

1447 Parameters 

1448 ---------- 

1449 hips_base_path : `lsst.resources.ResourcePath` 

1450 ResourcePath to top of HiPS tree. File will be written

1451 to this path as ``Moc.fits``. 

1452 max_order : `int` 

1453 Maximum HEALPix order. 

1454 pixels : `np.ndarray` 

1455 Array of pixels covered. 

1456 min_uniq_order : `int`, optional 

1457 Minimum HEALPix order for looking for fully covered pixels. 

1458 """ 

1459 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1460 # outside of the butler. This task has been given (temporary) 

1461 # Special Dispensation because of the nature of HiPS outputs until 

1462 # a more controlled solution can be found. 

1463 

1464 # Make the initial list of UNIQ pixels 

1465 uniq = 4*(4**max_order) + pixels 
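# The NUNIQ scheme packs (order, pixel) into uniq = 4*4**order + pixel; for
# example, pixel 0 at order 11 becomes 4*4**11 = 16777216.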

1466 

1467 # Make a healsparse map which provides easy degrade/comparisons. 

1468 hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32) 

1469 hspmap[pixels] = 1.0 

1470 

1471 # Loop over orders, degrade each time, and look for pixels with full coverage. 

1472 for uniq_order in range(max_order - 1, min_uniq_order - 1, -1): 

1473 hspmap = hspmap.degrade(2**uniq_order, reduction="sum") 

1474 pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order)) 

1475 # Check if any of the pixels at uniq_order have full coverage. 

1476 covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero() 

1477 if covered.size == 0: 

1478 # No pixels at uniq_order are fully covered, we're done. 

1479 break 

1480 # Replace the UNIQ pixels that are fully covered. 

1481 uniq[covered] = 4*(4**uniq_order) + pix_shift[covered] 

1482 

1483 # Remove duplicate pixels. 

1484 uniq = np.unique(uniq) 

1485 

1486 # Output to fits. 

1487 tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")]) 

1488 tbl["UNIQ"] = uniq 

1489 

1490 order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2 

1491 moc_order = np.max(order) 

1492 

1493 hdu = fits.BinTableHDU(tbl) 

1494 hdu.header["PIXTYPE"] = "HEALPIX" 

1495 hdu.header["ORDERING"] = "NUNIQ" 

1496 hdu.header["COORDSYS"] = "C" 

1497 hdu.header["MOCORDER"] = moc_order 

1498 hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask" 

1499 

1500 uri = hips_base_path.join("Moc.fits") 

1501 

1502 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1503 hdu.writeto(temporary_uri.ospath) 

1504 

1505 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1506 

1507 def _write_allsky_file(self, hips_base_path, allsky_order): 

1508 """Write an Allsky.png file. 

1509 

1510 Parameters 

1511 ---------- 

1512 hips_base_path : `lsst.resources.ResourcePath` 

1513 Resource path to the base of the HiPS directory tree. 

1514 allsky_order : `int` 

1515 HEALPix order of the minimum order to make allsky file. 

1516 """ 

1517 tile_size = self.config.allsky_tilesize 

1518 n_tiles_per_side = int(np.sqrt(hpg.nside_to_npixel(hpg.order_to_nside(allsky_order)))) 
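# For example, at the default min_order=3 there are 12*4**3 = 768 order-3
# tiles, giving int(sqrt(768)) = 27 tiles per side in the Allsky mosaic, with
# each tile resized to allsky_tilesize x allsky_tilesize before pasting.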

1519 

1520 allsky_image = None 

1521 

1522 allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True) 

1523 pixel_regex = re.compile(r"Npix([0-9]+)\.png$") 

1524 png_uris = list( 

1525 ResourcePath.findFileResources( 

1526 candidates=[allsky_order_uri], 

1527 file_filter=pixel_regex, 

1528 ) 

1529 ) 

1530 

1531 for png_uri in png_uris: 

1532 matches = re.match(pixel_regex, png_uri.basename()) 

1533 pix_num = int(matches.group(1)) 

1534 tile_image = Image.open(io.BytesIO(png_uri.read())) 

1535 row = math.floor(pix_num//n_tiles_per_side) 

1536 column = pix_num % n_tiles_per_side 

1537 box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size) 

1538 tile_image_shrunk = tile_image.resize((tile_size, tile_size)) 

1539 

1540 if allsky_image is None: 

1541 allsky_image = Image.new( 

1542 tile_image.mode, 

1543 (n_tiles_per_side*tile_size, n_tiles_per_side*tile_size), 

1544 ) 

1545 allsky_image.paste(tile_image_shrunk, box) 

1546 

1547 uri = allsky_order_uri.join("Allsky.png") 

1548 

1549 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1550 allsky_image.save(temporary_uri.ospath) 

1551 

1552 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1553 

1554 def _get_dir_number(self, pixel): 

1555 """Compute the directory number from a pixel. 

1556 

1557 Parameters 

1558 ---------- 

1559 pixel : `int` 

1560 HEALPix pixel number. 

1561 

1562 Returns 

1563 ------- 

1564 dir_number : `int` 

1565 HiPS directory number. 

1566 """ 

1567 return (pixel//10000)*10000 

1568 

1569 

1570class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections, 

1571 dimensions=("instrument", ), 

1572 defaultTemplates={"coaddName": "deep"}): 

1573 hips_exposure_handles = pipeBase.connectionTypes.Input( 

1574 doc="HiPS-compatible HPX images.", 

1575 name="{coaddName}Coadd_hpx", 

1576 storageClass="ExposureF", 

1577 dimensions=("healpix11", "band"), 

1578 multiple=True, 

1579 deferLoad=True, 

1580 ) 

1581 

1582 

1583class GenerateColorHipsConfig(GenerateHipsConfig, 

1584 pipelineConnections=GenerateColorHipsConnections): 

1585 """Configuration parameters for GenerateColorHipsTask.""" 

1586 blue_channel_band = pexConfig.Field( 

1587 doc="Band to use for blue channel of color pngs.", 

1588 dtype=str, 

1589 default="g", 

1590 ) 

1591 green_channel_band = pexConfig.Field( 

1592 doc="Band to use for green channel of color pngs.", 

1593 dtype=str, 

1594 default="r", 

1595 ) 

1596 red_channel_band = pexConfig.Field( 

1597 doc="Band to use for red channel of color pngs.", 

1598 dtype=str, 

1599 default="i", 

1600 ) 

1601 png_color_asinh_minimum = pexConfig.Field( 

1602 doc="AsinhMapping intensity to be mapped to black for color png scaling.", 

1603 dtype=float, 

1604 default=0.0, 

1605 ) 

1606 png_color_asinh_stretch = pexConfig.Field( 

1607 doc="AsinhMapping linear stretch for color png scaling.", 

1608 dtype=float, 

1609 default=5.0, 

1610 ) 

1611 png_color_asinh_softening = pexConfig.Field( 

1612 doc="AsinhMapping softening parameter (Q) for color png scaling.", 

1613 dtype=float, 

1614 default=8.0, 

1615 ) 

1616 

1617 

1618class GenerateColorHipsTask(GenerateHipsTask): 

1619 """Task for making a HiPS tree with color pngs.""" 

1620 ConfigClass = GenerateColorHipsConfig 

1621 _DefaultName = "generateColorHips" 

1622 color_task = True 

1623 

1624 def _check_data_bands(self, data_bands): 

1625 """Check the data for configured bands. 

1626 

1627 Warn if any color bands are missing data. 

1628 

1629 Parameters 

1630 ---------- 

1631 data_bands : `set` [`str`] 

1632 Bands from the input data. 

1633 

1634 Returns 

1635 ------- 

1636 bands : `list` [`str`] 

1637 List of bands in bgr color order. 

1638 """ 

1639 if len(data_bands) == 0: 

1640 raise RuntimeError("GenerateColorHipsTask must have data from at least one band.") 

1641 

1642 if self.config.blue_channel_band not in data_bands: 

1643 self.log.warning( 

1644 "Color png blue_channel_band %s not in dataset.", 

1645 self.config.blue_channel_band 

1646 ) 

1647 if self.config.green_channel_band not in data_bands: 

1648 self.log.warning( 

1649 "Color png green_channel_band %s not in dataset.", 

1650 self.config.green_channel_band 

1651 ) 

1652 if self.config.red_channel_band not in data_bands: 

1653 self.log.warning( 

1654 "Color png red_channel_band %s not in dataset.", 

1655 self.config.red_channel_band 

1656 ) 

1657 

1658 bands = [ 

1659 self.config.blue_channel_band, 

1660 self.config.green_channel_band, 

1661 self.config.red_channel_band, 

1662 ] 

1663 

1664 return bands