Coverage for python / lsst / pipe / tasks / hips.py: 13%

732 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-15 00:08 +0000

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22"""Tasks for making and manipulating HIPS images.""" 

23 

24__all__ = [ 

25 "HighResolutionHipsTask", 

26 "HighResolutionHipsConfig", 

27 "HighResolutionHipsConnections", 

28 "HighResolutionHipsQuantumGraphBuilder", 

29 "GenerateHipsTask", 

30 "GenerateHipsConfig", 

31 "GenerateColorHipsTask", 

32 "GenerateColorHipsConfig", 

33] 

34 

35from collections import defaultdict 

36import numpy as np 

37import argparse 

38import colour 

39import io 

40import sys 

41import re 

42import warnings 

43import math 

44from datetime import datetime 

45import hpgeom as hpg 

46import healsparse as hsp 

47from astropy.io import fits 

48 

49try: 

50 from astropy.visualization.lupton_rgb import AsinhMapping 

51except ImportError: 

52 from ._fallback_asinhmapping import AsinhMapping 

53from PIL import Image 

54 

55from lsst.sphgeom import RangeSet, HealpixPixelization 

56from lsst.utils.timer import timeMethod 

57from lsst.daf.butler import Butler 

58from lsst.daf.butler.queries.expression_factory import ExpressionFactory 

59import lsst.pex.config as pexConfig 

60import lsst.pipe.base as pipeBase 

61from lsst.pipe.base.quantum_graph_builder import QuantumGraphBuilder 

62from lsst.pipe.base.quantum_graph_skeleton import QuantumGraphSkeleton 

63import lsst.afw.geom as afwGeom 

64import lsst.afw.math as afwMath 

65import lsst.afw.image as afwImage 

66import lsst.geom as geom 

67from lsst.afw.geom import makeHpxWcs 

68from lsst.resources import ResourcePath 

69 

70from .healSparseMapping import _is_power_of_two 

71from .prettyPictureMaker import PrettyPictureTask, PrettyPictureConfig 

72 

73 

class HighResolutionHipsConnections(
    pipeBase.PipelineTaskConnections, dimensions=("healpix9", "band"), defaultTemplates={"coaddName": "deep"}
):
    coadd_exposure_handles = pipeBase.connectionTypes.Input(
        doc="Coadded exposures to convert to HIPS format.",
        name="{coaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    hips_exposures = pipeBase.connectionTypes.Output(
        doc="HiPS-compatible HPX image.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        def _sole_healpix_order(dims, too_many_msg, missing_msg):
            # Scan ``dims`` for exactly one healpix<N> dimension and return N;
            # raise with the caller-supplied message otherwise.
            found = None
            for name in dims:
                if "healpix" in name:
                    if found is not None:
                        raise ValueError(too_many_msg)
                    found = int(name.split("healpix")[1])
            if found is None:
                raise ValueError(missing_msg)
            return found

        # The quantum dimensions must carry exactly one healpix order, and it
        # may not be finer than the configured HiPS order.
        quantum_order = _sole_healpix_order(
            self.dimensions,
            "Must not specify more than one quantum healpix dimension.",
            "Must specify a healpix dimension in quantum dimensions.",
        )
        if quantum_order > config.hips_order:
            raise ValueError("Quantum healpix dimension order must not be greater than hips_order")

        # The output exposures must be declared at exactly the HiPS order.
        order = _sole_healpix_order(
            self.hips_exposures.dimensions,
            "Must not specify more than one healpix dimension.",
            "Must specify a healpix dimension in hips_exposure dimensions.",
        )
        if order != config.hips_order:
            raise ValueError("healpix dimension order must match config.hips_order.")

120 

class HighResolutionHipsConfig(
    pipeBase.PipelineTaskConfig, pipelineConnections=HighResolutionHipsConnections
):
    """Configuration parameters for HighResolutionHipsTask.

    Notes
    -----
    A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
    2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
    cell, with 2**shift_order x 2**shift_order sub-pixels on a side, which
    defines the target resolution of the HiPS image. The IVOA recommends
    shift_order=9, for 2**9=512 pixels on a side.

    Table 5 from
    https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
    shows the relationship between hips_order, number of tiles (full
    sky coverage), cell size, and sub-pixel size/image resolution (with
    the default shift_order=9):
    +------------+-----------------+--------------+------------------+
    | hips_order | Number of Tiles | Cell Size    | Image Resolution |
    +============+=================+==============+==================+
    | 0          | 12              | 58.63 deg    | 6.871 arcmin     |
    | 1          | 48              | 29.32 deg    | 3.435 arcmin     |
    | 2          | 192             | 14.66 deg    | 1.718 arcmin     |
    | 3          | 768             | 7.329 deg    | 51.53 arcsec     |
    | 4          | 3072            | 3.665 deg    | 25.77 arcsec     |
    | 5          | 12288           | 1.832 deg    | 12.88 arcsec     |
    | 6          | 49152           | 54.97 arcmin | 6.442 arcsec     |
    | 7          | 196608          | 27.48 arcmin | 3.221 arcsec     |
    | 8          | 786432          | 13.74 arcmin | 1.61 arcsec      |
    | 9          | 3145728         | 6.871 arcmin | 805.2mas         |
    | 10         | 12582912        | 3.435 arcmin | 402.6mas         |
    | 11         | 50331648        | 1.718 arcmin | 201.3mas         |
    | 12         | 201326592       | 51.53 arcsec | 100.6mas         |
    | 13         | 805306368       | 25.77 arcsec | 50.32mas         |
    +------------+-----------------+--------------+------------------+
    """

    # Order of the HEALPix grid on which output tiles are generated; must
    # match the healpix dimension of the hips_exposures output connection.
    hips_order = pexConfig.Field(
        doc="HIPS image order.",
        dtype=int,
        default=11,
    )
    # Default of 9 gives the IVOA-recommended 512x512 pixel tiles.
    shift_order = pexConfig.Field(
        doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
        dtype=int,
        default=9,
    )
    warp = pexConfig.ConfigField(
        dtype=afwMath.Warper.ConfigClass,
        doc="Warper configuration",
    )

    def setDefaults(self):
        # Use a high-order Lanczos kernel for warping coadds onto the HPX grid.
        self.warp.warpingKernelName = "lanczos5"

176 

177 

class HipsTaskNameDescriptor:
    """Descriptor that builds a ``_DefaultName`` whose numeric suffix is the
    healpix order declared in the owning task's connections class dimensions.

    Parameters
    ----------
    prefix : `str`
        The prefix of the Default name, to which the order will be
        appended.
    """

    def __init__(self, prefix):
        # Template with a trailing placeholder for the healpix order.
        self._defaultName = f"{prefix}{{}}"
        self._order = None

    def __get__(self, obj, klass=None):
        if klass is None:
            raise RuntimeError("HipsTaskDescriptor was used in an unexpected context")
        if self._order is None:
            # Lazily pull the order out of the first "healpix<N>" dimension on
            # the connections class, then cache it for later accesses.
            dimensions = klass.ConfigClass.ConnectionsClass.dimensions
            candidates = (re.match(r"^healpix(\d*)$", dim) for dim in dimensions)
            hit = next((m for m in candidates if m is not None), None)
            if hit is None:
                raise RuntimeError("Could not find healpix dimension in connections class")
            self._order = int(hit.group(1))
        return self._defaultName.format(self._order)

206 

207 

class HighResolutionHipsTask(pipeBase.PipelineTask):
    """Task for making high resolution HiPS images."""

    ConfigClass = HighResolutionHipsConfig
    # Descriptor appends the quantum healpix order from the connections class,
    # e.g. "highResolutionHips9".
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Warper built from config.warp (lanczos5 kernel by default).
        self.warper = afwMath.Warper.fromConfig(self.config.warp)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # Docstring inherited from PipelineTask.
        inputs = butlerQC.get(inputRefs)

        # Output tile data IDs are keyed by the healpix dimension at the
        # configured HiPS order.
        healpix_dim = f"healpix{self.config.hips_order}"

        pixels = [hips_exposure.dataId[healpix_dim] for hips_exposure in outputRefs.hips_exposures]

        outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])

        # Map each output pixel back to its dataset ref; run() may drop pixels
        # that had no data, and only surviving pixels are persisted.
        hips_exposure_ref_dict = {
            hips_exposure_ref.dataId[healpix_dim]: hips_exposure_ref
            for hips_exposure_ref in outputRefs.hips_exposures
        }
        for pixel, hips_exposure in outputs.hips_exposures.items():
            butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])

    def run(self, pixels, coadd_exposure_handles):
        """Run the HighResolutionHipsTask.

        Warps each input coadd onto the HPX WCS of every requested healpix
        pixel and combines overlapping warps with a weighted mean.

        Parameters
        ----------
        pixels : `Iterable` [ `int` ]
            Iterable of healpix pixels (nest ordering) to warp to.
        coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
            Handles for the coadd exposures.

        Returns
        -------
        outputs : `lsst.pipe.base.Struct`
            ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value).
            Pixels with no overlapping input data are omitted.
        """
        self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)

        # Each HiPS tile is 2**shift_order pixels on a side.
        npix = 2**self.config.shift_order
        bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0), dimensions=geom.Extent2I(npix, npix))

        # For each healpix pixel we will create an empty exposure with the
        # correct HPX WCS. We furthermore create a dict to hold each of
        # the warps that will go into each HPX exposure.
        exp_hpx_dict = {}
        warp_dict = {}
        for pixel in pixels:
            wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
            exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
            exp_hpx_dict[pixel] = exp_hpx
            warp_dict[pixel] = []

        first_handle = True
        # Loop over input coadd exposures to minimize i/o (this speeds things
        # up by ~8x to batch together pixels that overlap a given coadd).
        for handle in coadd_exposure_handles:
            coadd_exp = handle.get()

            # For each pixel, warp the coadd to the HPX WCS for the pixel.
            for pixel in pixels:
                warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)

                # Start from a fully NO_DATA/NaN image so uncovered regions
                # stay flagged after the warp is assigned into it.
                exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
                exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

                if first_handle:
                    # Make sure the mask planes, filter, and photocalib of the output
                    # exposure match the (first) input exposure.
                    exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
                    exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
                    exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())

                if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
                    # There is no overlap, skip.
                    self.log.debug(
                        "No overlap between output HPX %d and input exposure %s", pixel, handle.dataId
                    )
                    continue

                exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
                warp_dict[pixel].append(exp.maskedImage)

            first_handle = False

        # Combine overlapping warps with a NaN-safe, variance-weighted mean.
        stats_flags = afwMath.stringToStatisticsProperty("MEAN")
        stats_ctrl = afwMath.StatisticsControl()
        stats_ctrl.setNanSafe(True)
        stats_ctrl.setWeighted(True)
        stats_ctrl.setCalcErrorFromInputVariance(True)

        # Loop over pixels and combine the warps for each pixel.
        # The combination is done with a simple mean for pixels that
        # overlap in neighboring patches.
        for pixel in pixels:
            exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

            if not warp_dict[pixel]:
                # Nothing in this pixel
                self.log.debug("No data in HPX pixel %d", pixel)
                # Remove the pixel from the output, no need to persist an
                # empty exposure.
                exp_hpx_dict.pop(pixel)
                continue

            exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
                warp_dict[pixel],
                stats_flags,
                stats_ctrl,
                [1.0] * len(warp_dict[pixel]),
                clipped=0,
                maskMap=[],
            )

        return pipeBase.Struct(hips_exposures=exp_hpx_dict)

    @classmethod
    def build_quantum_graph_cli(cls, argv):
        """A command-line interface entry point to `build_quantum_graph`.
        This method provides the implementation for the
        ``build-high-resolution-hips-qg`` script.

        Parameters
        ----------
        argv : `Sequence` [ `str` ]
            Command-line arguments (e.g. ``sys.argv[1:]``).
        """
        parser = cls._make_cli_parser()

        args = parser.parse_args(argv)

        if args.subparser_name is None:
            parser.print_help()
            sys.exit(1)

        pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
        pipeline_graph = pipeline.to_graph()

        # The custom graph builder below only supports a single task.
        if len(pipeline_graph.tasks) != 1:
            raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")

        (task_node,) = pipeline_graph.tasks.values()

        butler = Butler(args.butler_config, collections=args.input)

        if args.subparser_name == "segment":
            # Do the segmentation: print every pixel at --hpix_build_order
            # that overlaps any tract containing input coadds.
            hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
            dataset = task_node.inputs["coadd_exposure_handles"].dataset_type_name
            with butler.query() as q:
                data_ids = list(q.join_dataset_search(dataset).data_ids("tract").with_dimension_records())
            region_pixels = []
            for data_id in data_ids:
                region = data_id.region
                pixel_range = hpix_pixelization.envelope(region)
                for r in pixel_range.ranges():
                    region_pixels.extend(range(r[0], r[1]))
            # Deduplicate pixels shared by neighboring tracts.
            indices = np.unique(region_pixels)

            print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
            for pixel in indices:
                print(pixel)

        elif args.subparser_name == "build":
            # Build the quantum graph.

            # Figure out collection names.
            if args.output_run is None:
                if args.output is None:
                    raise ValueError("At least one of --output or --output-run options is required.")
                args.output_run = "{}/{}".format(args.output, pipeBase.Instrument.makeCollectionTimestamp())

            build_ranges = RangeSet(sorted(args.pixels))

            # Metadata includes a subset of attributes defined in CmdLineFwk.
            metadata = {
                "input": args.input,
                "butler_argument": args.butler_config,
                "output": args.output,
                "output_run": args.output_run,
                "data_query": args.where,
                "time": f"{datetime.now()}",
            }

            builder = HighResolutionHipsQuantumGraphBuilder(
                pipeline_graph,
                butler,
                input_collections=args.input,
                output_run=args.output_run,
                constraint_order=args.hpix_build_order,
                constraint_ranges=build_ranges,
                where=args.where,
            )
            qg = builder.build(metadata, attach_datastore_records=True)
            qg.saveUri(args.save_qgraph)

    @classmethod
    def _make_cli_parser(cls):
        """Make the command-line parser.

        Returns
        -------
        parser : `argparse.ArgumentParser`
            Parser with ``segment`` and ``build`` sub-commands.
        """
        parser = argparse.ArgumentParser(
            description=("Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."),
        )
        subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")

        parser_segment = subparsers.add_parser("segment", help="Determine survey segments for workflow.")
        parser_build = subparsers.add_parser("build", help="Build quantum graph for HighResolutionHipsTask")

        for sub in [parser_segment, parser_build]:
            # These arguments are in common.
            sub.add_argument(
                "-b",
                "--butler-config",
                type=str,
                help="Path to data repository or butler configuration.",
                required=True,
            )
            sub.add_argument(
                "-p",
                "--pipeline",
                type=str,
                help="Pipeline file, limited to one task.",
                required=True,
            )
            sub.add_argument(
                "-i",
                "--input",
                type=str,
                nargs="+",
                help="Input collection(s) to search for coadd exposures.",
                required=True,
            )
            sub.add_argument(
                "-o",
                "--hpix_build_order",
                type=int,
                default=1,
                help="HEALPix order to segment sky for building quantum graph files.",
            )
            sub.add_argument(
                "-w",
                "--where",
                type=str,
                default="",
                help="Data ID expression used when querying for input coadd datasets.",
            )

        # Build-only arguments.
        parser_build.add_argument(
            "--output",
            type=str,
            help=(
                "Name of the output CHAINED collection. If this options is specified and "
                "--output-run is not, then a new RUN collection will be created by appending "
                "a timestamp to the value of this option."
            ),
            default=None,
            metavar="COLL",
        )
        parser_build.add_argument(
            "--output-run",
            type=str,
            help=(
                "Output RUN collection to write resulting images. If not provided "
                "then --output must be provided and a new RUN collection will be created "
                "by appending a timestamp to the value passed with --output."
            ),
            default=None,
            metavar="RUN",
        )
        parser_build.add_argument(
            "-q",
            "--save-qgraph",
            type=str,
            help="Output filename for QuantumGraph.",
            required=True,
        )
        parser_build.add_argument(
            "-P",
            "--pixels",
            type=int,
            nargs="+",
            help="Pixels at --hpix_build_order to generate quantum graph.",
            required=True,
        )

        return parser

503 

504 

class HighResolutionHipsQuantumGraphBuilder(QuantumGraphBuilder):
    """A custom `lsst.pipe.base.QuantumGraphBuilder` for running
    `HighResolutionHipsTask` only.

    This is a workaround for incomplete butler query support for HEALPix
    dimensions.

    Parameters
    ----------
    pipeline_graph : `lsst.pipe.base.PipelineGraph`
        Pipeline graph with exactly one task, which must be a configuration
        of `HighResolutionHipsTask`.
    butler : `lsst.daf.butler.Butler`
        Client for the butler data repository. May be read-only.
    input_collections : `str` or `Iterable` [ `str` ], optional
        Collection or collections to search for input datasets, in order.
        If not provided, ``butler.collections`` will be searched.
    output_run : `str`, optional
        Name of the output collection. If not provided, ``butler.run`` will
        be used.
    constraint_order : `int`
        HEALPix order used to constrain which quanta are generated, via
        ``constraint_indices``. This should be a coarser grid (smaller
        order) than the order used for the task's quantum and output data
        IDs, and ideally something between the spatial scale of a patch or
        the data repository's "common skypix" system (usually ``htm7``).
    constraint_ranges : `lsst.sphgeom.RangeSet`
        RangeSet that describes constraint pixels (HEALPix NEST, with order
        ``constraint_order``) to constrain generated quanta.
    where : `str`, optional
        A boolean `str` expression of the form accepted by
        `lsst.daf.butler.Butler` to constrain input datasets. This may
        contain a constraint on tracts, patches, or bands, but not HEALPix
        indices. Constraints on tracts and patches should usually be
        unnecessary, however - existing coadds that overlap the given
        HEALpix indices will be selected without such a constraint, and
        providing one may reject some that should normally be included.
    """

    def __init__(
        self,
        pipeline_graph,
        butler,
        *,
        input_collections=None,
        output_run=None,
        constraint_order,
        constraint_ranges,
        where="",
    ):
        super().__init__(pipeline_graph, butler, input_collections=input_collections, output_run=output_run)
        self.constraint_order = constraint_order
        self.constraint_ranges = constraint_ranges
        self.where = where

    def process_subgraph(self, subgraph):
        # Docstring inherited.
        (task_node,) = subgraph.tasks.values()

        # Since we know this is the only task in the pipeline, we know there
        # is only one overall input and one regular output.
        (input_dataset_type_node,) = subgraph.inputs_of(task_node.label).values()
        assert input_dataset_type_node is not None, "PipelineGraph should be resolved by base class."
        (output_edge,) = task_node.outputs.values()
        output_dataset_type_node = subgraph.dataset_types[output_edge.parent_dataset_type_name]
        # The single skypix dimension of the output dataset type (the HiPS
        # tile order, e.g. healpix11).
        (hpx_output_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d] for d in output_dataset_type_node.dimensions.skypix
        )
        constraint_hpx_pixelization = self.butler.dimensions.skypix_dimensions[
            f"healpix{self.constraint_order}"
        ].pixelization
        common_skypix_pixelization = self.butler.dimensions.commonSkyPix.pixelization

        # We will need all the pixels at the quantum resolution as well.
        # '4' appears here frequently because it's the number of pixels at
        # level N in a single pixel at level (N-1).
        (hpx_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d] for d in task_node.dimensions.names if d != "band"
        )
        hpx_pixelization = hpx_dimension.pixelization
        if hpx_pixelization.level < self.constraint_order:
            # NOTE(review): the message text looks inverted relative to the
            # condition (which requires level >= constraint_order) — confirm.
            raise ValueError(f"Quantum order {hpx_pixelization.level} must be < {self.constraint_order}")
        hpx_ranges = self.constraint_ranges.scaled(4 ** (hpx_pixelization.level - self.constraint_order))

        # We can be generous in looking for pixels here, because we constrain
        # by actual patch regions below.
        common_skypix_ranges = RangeSet()
        for begin, end in self.constraint_ranges:
            for hpx_index in range(begin, end):
                constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
                common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)

        # To keep the query from getting out of hand (and breaking) we simplify
        # until we have fewer than 100 ranges which seems to work fine.
        for simp in range(1, 10):
            if len(common_skypix_ranges) < 100:
                break
            common_skypix_ranges.simplify(simp)

        # Use that RangeSet to assemble a WHERE constraint expression. This
        # could definitely get too big if the "constraint healpix" order is too
        # fine. It's important to use `x IN (a..b)` rather than
        # `(x >= a AND x <= b)` to avoid some pessimizations that are present
        # in the butler expression handling (at least as of ~w_2025_05).
        xf = ExpressionFactory(self.butler.dimensions)
        common_skypix_proxy = getattr(xf, self.butler.dimensions.commonSkyPix.name)
        where_terms = []
        for begin, end in common_skypix_ranges:
            if begin == end - 1:
                # Single-pixel range: equality is simpler than IN (a..a).
                where_terms.append(common_skypix_proxy == begin)
            else:
                where_terms.append(common_skypix_proxy.in_range(begin, end))
        where = xf.any(*where_terms)
        # Query for input datasets with this constraint, and ask for expanded
        # data IDs because we want regions. Immediately group this by patch so
        # we don't do later geometric stuff n_bands more times than we need to.
        with self.butler.query() as query:
            input_refs = (
                query.datasets(input_dataset_type_node.dataset_type, collections=self.input_collections)
                .where(
                    where,
                    self.where,
                )
                .with_dimension_records()
            )
            inputs_by_patch = defaultdict(set)
            patch_dimensions = self.butler.dimensions.conform(["patch"])
            skeleton = QuantumGraphSkeleton([task_node.label])
            for input_ref in input_refs:
                dataset_key = skeleton.add_dataset_node(input_ref.datasetType.name, input_ref.dataId)
                skeleton.set_dataset_ref(input_ref, dataset_key)
                inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(dataset_key)
            if not inputs_by_patch:
                message_body = "\n".join(input_refs.explain_no_results())
                raise RuntimeError(f"No inputs found:\n{message_body}")

        # Iterate over patches and compute the set of output healpix pixels
        # that overlap each one. Use that to associate inputs with output
        # pixels, but only for the output pixels we've already identified.
        inputs_by_hpx = defaultdict(set)
        for patch_data_id, input_keys_for_patch in inputs_by_patch.items():
            patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
            for begin, end in patch_hpx_ranges & hpx_ranges:
                for hpx_index in range(begin, end):
                    inputs_by_hpx[hpx_index].update(input_keys_for_patch)

        # Iterate over the dict we just created and create preliminary quanta.
        for hpx_index, input_keys_for_hpx_index in inputs_by_hpx.items():
            # Group inputs by band.
            input_keys_by_band = defaultdict(list)
            for input_key in input_keys_for_hpx_index:
                input_ref = skeleton.get_dataset_ref(input_key)
                assert input_ref is not None, "Code above adds the same nodes to the graph with refs."
                input_keys_by_band[input_ref.dataId["band"]].append(input_key)
            # Iterate over bands to make quanta.
            for band, input_keys_for_band in input_keys_by_band.items():
                data_id = self.butler.registry.expandDataId({hpx_dimension.name: hpx_index, "band": band})
                quantum_key = skeleton.add_quantum_node(task_node.label, data_id)
                # Add inputs to the skeleton.
                skeleton.add_input_edges(quantum_key, input_keys_for_band)
                # Add the regular outputs: every tile at the (finer) HiPS
                # order contained in this quantum-order pixel.
                hpx_pixel_ranges = RangeSet(hpx_index)
                hpx_output_ranges = hpx_pixel_ranges.scaled(
                    4 ** (task_node.config.hips_order - hpx_pixelization.level)
                )
                for begin, end in hpx_output_ranges:
                    for hpx_output_index in range(begin, end):
                        dataset_key = skeleton.add_dataset_node(
                            output_dataset_type_node.name,
                            self.butler.registry.expandDataId(
                                {hpx_output_dimension: hpx_output_index, "band": band}
                            ),
                        )
                        skeleton.add_output_edge(quantum_key, dataset_key)
                # Add auxiliary outputs (log, metadata).
                for write_edge in task_node.iter_all_outputs():
                    if write_edge.connection_name == output_edge.connection_name:
                        continue
                    dataset_key = skeleton.add_dataset_node(write_edge.parent_dataset_type_name, data_id)
                    skeleton.add_output_edge(quantum_key, dataset_key)
        return skeleton

686 

687 

class HipsPropertiesSpectralTerm(pexConfig.Config):
    """Configuration for the wavelength range covered by a single band."""

    lambda_min = pexConfig.Field(
        doc="Minimum wavelength (nm)",
        dtype=float,
    )
    lambda_max = pexConfig.Field(
        doc="Maximum wavelength (nm)",
        dtype=float,
    )

697 

698 

class HipsPropertiesConfig(pexConfig.Config):
    """Configuration parameters for writing a HiPS properties file."""

    creator_did_template = pexConfig.Field(
        doc=("Unique identifier of the HiPS - Format: IVOID. Use ``{band}`` to substitute the band name."),
        dtype=str,
        optional=False,
    )
    obs_collection = pexConfig.Field(
        doc="Short name of original data set - Format: one word",
        dtype=str,
        optional=True,
    )
    obs_description_template = pexConfig.Field(
        doc=(
            "Data set description - Format: free text, longer free text "
            "description of the dataset. Use ``{band}`` to substitute "
            "the band name."
        ),
        dtype=str,
    )
    prov_progenitor = pexConfig.ListField(
        doc="Provenance of the original data - Format: free text",
        dtype=str,
        default=[],
    )
    obs_title_template = pexConfig.Field(
        doc=(
            "Data set title format: free text, but should be short. "
            "Use ``{band}`` to substitute the band name."
        ),
        dtype=str,
        optional=False,
    )
    spectral_ranges = pexConfig.ConfigDictField(
        doc=("Mapping from band to lambda_min, lambda_max (nm). May be approximate."),
        keytype=str,
        itemtype=HipsPropertiesSpectralTerm,
        default={},
    )
    initial_ra = pexConfig.Field(
        doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_dec = pexConfig.Field(
        doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_fov = pexConfig.Field(
        doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
        dtype=float,
        optional=True,
    )
    obs_ack = pexConfig.Field(
        doc="Observation acknowledgements (free text).",
        dtype=str,
        optional=True,
    )
    t_min = pexConfig.Field(
        doc="Time (MJD) of earliest observation included in HiPS",
        dtype=float,
        optional=True,
    )
    t_max = pexConfig.Field(
        doc="Time (MJD) of latest observation included in HiPS",
        dtype=float,
        optional=True,
    )

    def validate(self):
        super().validate()

        # The HiPS standard requires obs_collection to be a single word.
        if self.obs_collection is not None:
            if re.search(r"\s", self.obs_collection):
                raise ValueError("obs_collection cannot contain any space characters.")

    def setDefaults(self):
        # Values here taken from
        # https://github.com/lsst-dm/dax_obscore/blob/44ac15029136e2ec15/configs/dp02.yaml#L46
        # One data-driven loop instead of six copy/pasted per-band blocks;
        # dict insertion order preserves the original u, g, r, i, z, y order.
        for band, (lambda_min, lambda_max) in {
            "u": (330.0, 400.0),
            "g": (402.0, 552.0),
            "r": (552.0, 691.0),
            "i": (691.0, 818.0),
            "z": (818.0, 922.0),
            "y": (970.0, 1060.0),
        }.items():
            term = HipsPropertiesSpectralTerm()
            term.lambda_min = lambda_min
            term.lambda_max = lambda_max
            self.spectral_ranges[band] = term

804 

805 

class GenerateHipsConnections(
    pipeBase.PipelineTaskConnections,
    dimensions=("instrument", "band"),
    defaultTemplates={"coaddName": "deep"},
):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )

    def __init__(self, *, config):
        super().__init__(config=config)
        if config.parallel_highest_order:
            # Find the healpix dimension of the input tiles. As in the
            # original code, the last matching dimension wins; previously a
            # missing match crashed with NameError on an unbound local, so
            # raise a clear error instead.
            hdim = None
            for dim in self.hips_exposure_handles.dimensions:
                if "healpix" in dim:
                    hdim = dim
            if hdim is None:
                raise ValueError(
                    "parallel_highest_order requires a healpix dimension on hips_exposure_handles."
                )

            # Promote the healpix dimension to a quantum dimension so each
            # highest-order pixel gets its own quantum.
            current_dimensions = self.dimensions
            self.dimensions = set((*current_dimensions, hdim))

830 

831 

class GenerateHipsConfig(pipeBase.PipelineTaskConfig, pipelineConnections=GenerateHipsConnections):
    """Configuration parameters for GenerateHipsTask."""

    # WARNING: In general PipelineTasks are not allowed to do any outputs
    # outside of the butler. This task has been given (temporary)
    # Special Dispensation because of the nature of HiPS outputs until
    # a more controlled solution can be found.
    hips_base_uri = pexConfig.Field(
        doc="URI to HiPS base for output.",
        dtype=str,
        optional=False,
    )
    min_order = pexConfig.Field(
        doc="Minimum healpix order for HiPS tree.",
        dtype=int,
        default=3,
    )
    properties = pexConfig.ConfigField(
        dtype=HipsPropertiesConfig,
        doc="Configuration for properties file.",
    )
    allsky_tilesize = pexConfig.Field(
        dtype=int,
        doc="Allsky tile size; must be power of 2. HiPS standard recommends 64x64 tiles.",
        default=64,
        check=_is_power_of_two,
    )
    png_gray_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
        dtype=float,
        default=0.0,
    )
    png_gray_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for grayscale png scaling.",
        dtype=float,
        default=2.0,
    )
    png_gray_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
        dtype=float,
        default=8.0,
    )
    parallel_highest_order = pexConfig.Field[bool](
        doc=(
            "If this is set to True, each of the highest order hips pixels will"
            " be put into their own quanta, and can be run in parallel. The "
            "trade off is the highest order must be run alone by setting "
            "min_order to be the same as the healpix dimension. This will "
            "skip writing all sky info, which must be done in another "
            "invocation of this task."
        ),
        default=False,
    )
    skip_highest_image = pexConfig.Field[bool](
        doc=(
            "This option should be used if in another task instance "
            "parallel_highest_order was set to True. This option will skip "
            "making and writing png for the highest order."
        ),
        default=False,
    )
    file_extension = pexConfig.ChoiceField[str](
        doc="Extension for the persisted image, must be png or webp",
        allowed={"png": "Use the png image extension", "webp": "Use the webp image extension"},
        default="png",
    )

    def validate(self):
        """Validate the parallel-highest-order option combination.

        Raises
        ------
        RuntimeError
            Raised if the healpix order cannot be determined from the
            input connection dimensions.
        ValueError
            Raised if ``min_order`` differs from the healpix order, or if
            ``skip_highest_image`` is set together with
            ``parallel_highest_order``.
        """
        if self.parallel_highest_order:
            dimensions = self.connections.ConnectionsClass.hips_exposure_handles.dimensions
            order = -1
            for dim in dimensions:
                if "healpix" in dim:
                    order = int(dim.replace("healpix", ""))
            if order == -1:
                raise RuntimeError("Could not determine the healpix dim order")
            if self.min_order != order:
                raise ValueError(
                    "min_order must be the same as healpix order if parallel_highest_order is True"
                )
            if self.skip_highest_image:
                raise ValueError("Skip_highest_image should be False when parallel_highest_order is True")

914 

915 

class GenerateHipsTask(pipeBase.PipelineTask):
    """Task for making a HiPS tree with FITS and grayscale PNGs."""

    ConfigClass = GenerateHipsConfig
    _DefaultName = "generateHips"
    # Subclasses (e.g. the color variant) flip this to True; it is passed to
    # run() as ``do_color`` to select color pngs instead of grayscale tiles.
    color_task = False

    config: ConfigClass

924 

925 @timeMethod 

926 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

927 inputs = butlerQC.get(inputRefs) 

928 

929 dims = inputRefs.hips_exposure_handles[0].dataId.dimensions.names 

930 order = None 

931 for dim in dims: 

932 if "healpix" in dim: 

933 order = int(dim.split("healpix")[1]) 

934 healpix_dim = dim 

935 break 

936 else: 

937 raise RuntimeError("Could not determine healpix order for input exposures.") 

938 

939 hips_exposure_handle_dict = { 

940 ( 

941 hips_exposure_handle.dataId[healpix_dim], 

942 hips_exposure_handle.dataId["band"], 

943 ): hips_exposure_handle 

944 for hips_exposure_handle in inputs["hips_exposure_handles"] 

945 } 

946 

947 data_bands = { 

948 hips_exposure_handle.dataId["band"] for hips_exposure_handle in inputs["hips_exposure_handles"] 

949 } 

950 bands = self._check_data_bands(data_bands) 

951 

952 self.run( 

953 bands=bands, 

954 max_order=order, 

955 hips_exposure_handle_dict=hips_exposure_handle_dict, 

956 do_color=self.color_task, 

957 ) 

958 

959 def _check_data_bands(self, data_bands): 

960 """Check that the data has only a single band. 

961 

962 Parameters 

963 ---------- 

964 data_bands : `set` [`str`] 

965 Bands from the input data. 

966 

967 Returns 

968 ------- 

969 bands : `list` [`str`] 

970 List of single band to process. 

971 

972 Raises 

973 ------ 

974 RuntimeError if there is not exactly one band. 

975 """ 

976 if len(data_bands) != 1: 

977 raise RuntimeError("GenerateHipsTask can only use data from a single band.") 

978 

979 return list(data_bands) 

980 

    @timeMethod
    def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
        """Run the GenerateHipsTask.

        Walks the HiPS tree from the highest (native) order down to
        ``config.min_order``, writing a tile at each order and averaging
        2x2 blocks of each written tile into its parent, then writes the
        properties/MOC/Allsky metadata unless running in parallel mode.

        Parameters
        ----------
        bands : `list` [`str`]
            List of bands to be processed (or single band).
        max_order : `int`
            HEALPix order of the maximum (native) HPX exposures.
        hips_exposure_handle_dict : `dict` {`int`: `lsst.daf.butler.DeferredDatasetHandle`}
            Dict of handles for the HiPS high-resolution exposures.
            Key is (pixel number, ``band``).
        do_color : `bool`, optional
            Do color pngs instead of per-band grayscale.
        """
        min_order = self.config.min_order

        if not do_color:
            png_grayscale_mapping = AsinhMapping(
                self.config.png_gray_asinh_minimum,
                self.config.png_gray_asinh_stretch,
                Q=self.config.png_gray_asinh_softening,
            )
        else:
            # Color configuration fields (rgbStyle, channel bands, etc.) are
            # defined on the color subclass config; only reached when
            # do_color is True.
            match self.config.rgbStyle:
                case "lupton":
                    png_color_mapping = AsinhMapping(
                        self.config.png_color_asinh_minimum,
                        self.config.png_color_asinh_stretch,
                        Q=self.config.png_color_asinh_softening,
                    )

                    bcb = self.config.blue_channel_band
                    gcb = self.config.green_channel_band
                    rcb = self.config.red_channel_band
                    colorstr = f"{bcb}{gcb}{rcb}"
                case "lsstRGB":
                    colorstr = "".join(bands)

        # The base path is based on the hips_base_uri.
        hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)

        # We need to unique-ify the pixels because they show up for multiple bands.
        # The output of this is a sorted array.
        pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))

        # Add a "gutter" pixel at the end. Start with 0 which maps to 0 always.
        pixels = np.append(pixels, [0])

        # Convert the pixels to each order that will be generated.
        # Each right-shift by 2 maps a pixel to its parent at the next lower
        # order (NESTED scheme).
        pixels_shifted = {}
        pixels_shifted[max_order] = pixels
        for order in range(max_order - 1, min_order - 1, -1):
            pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)

        # And set the gutter to an illegal pixel value.
        for order in range(min_order, max_order + 1):
            pixels_shifted[order][-1] = -1

        # Read in the first pixel for determining image properties.
        exp0 = list(hips_exposure_handle_dict.values())[0].get()
        bbox = exp0.getBBox()
        npix = bbox.getWidth()
        shift_order = int(np.round(np.log2(npix)))

        # Create blank exposures for each level, including the highest order.
        # We also make sure we create blank exposures for any bands used in the color
        # PNGs, even if they aren't available.
        exposures = {}
        for band in bands:
            for order in range(min_order, max_order + 1):
                exp = exp0.Factory(bbox=bbox)
                exp.image.array[:, :] = np.nan
                exposures[(band, order)] = exp

        # Loop over all pixels, avoiding the gutter.
        for pixel_counter, pixel in enumerate(pixels[:-1]):
            self.log.debug("Working on high resolution pixel %d", pixel)
            for band in bands:
                # Read all the exposures here for the highest order.
                # There will always be at least one band with a HiPS image available
                # at the highest order. However, for color images it is possible that
                # not all bands have coverage so we require this check.
                if (pixel, band) in hips_exposure_handle_dict:
                    exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()

            # Go up the HiPS tree.
            # We only write pixels and rebin to fill the parent pixel when we are
            # done with a current pixel, which is determined if the next pixel
            # has a different pixel number.
            for order in range(max_order, min_order - 1, -1):
                if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
                    # This order is not done, and so none of the other orders will be.
                    break

                # We can now write out the images for each band.
                # Note this will always trigger at the max order where each pixel is unique.
                if self.config.skip_highest_image and order == max_order:
                    do_write_image = False
                else:
                    do_write_image = True

                if do_write_image:
                    if not do_color:
                        for band in bands:
                            self._write_hips_image(
                                hips_base_path.join(f"band_{band}", forceDirectory=True),
                                order,
                                pixels_shifted[order][pixel_counter],
                                exposures[(band, order)].image,
                                png_grayscale_mapping,
                                shift_order=shift_order,
                            )
                    else:
                        # Make a color png.
                        lupton_args = {}
                        lsstRGB_args = {}
                        match self.config.rgbStyle:
                            case "lupton":
                                lupton_args["image_red"] = exposures[
                                    (self.config.red_channel_band, order)
                                ].image
                                lupton_args["image_green"] = exposures[
                                    (self.config.green_channel_band, order)
                                ].image

                                lupton_args["image_blue"] = exposures[
                                    (self.config.blue_channel_band, order)
                                ].image

                                lupton_args["png_mapping"] = png_color_mapping
                            case "lsstRGB":
                                # Only pass bands that actually have an
                                # exposure at this order.
                                band_mapping = {}
                                for band in bands:
                                    key = (band, order)
                                    if (value := exposures.get(key)) is not None:
                                        band_mapping[band] = value
                                lsstRGB_args["band_mapping"] = band_mapping

                        self._write_hips_color_png(
                            hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                            order,
                            pixels_shifted[order][pixel_counter],
                            lupton_args,
                            lsstRGB_args,
                        )

                log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
                self.log.log(
                    log_level,
                    "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
                    ",".join(bands),
                    order,
                    pixels_shifted[order][pixel_counter],
                    pixel_counter,
                    len(pixels) - 1,
                )

                # When we are at the top of the tree, erase top level images and continue.
                if order == min_order:
                    for band in bands:
                        exposures[(band, order)].image.array[:, :] = np.nan
                    continue

                # Now average the images for each band.
                for band in bands:
                    arr = exposures[(band, order)].image.array.reshape(npix // 2, 2, npix // 2, 2)
                    with warnings.catch_warnings():
                        # nanmean of an all-NaN 2x2 block emits a RuntimeWarning;
                        # all-NaN output is expected for uncovered regions.
                        warnings.simplefilter("ignore")
                        binned_image_arr = np.nanmean(arr, axis=(1, 3))

                    # Fill the next level up. We figure out which of the four
                    # sub-pixels the current pixel occupies.
                    sub_index = pixels_shifted[order][pixel_counter] - np.left_shift(
                        pixels_shifted[order - 1][pixel_counter], 2
                    )

                    # Fill exposure at the next level up.
                    exp = exposures[(band, order - 1)]

                    # Fill the correct subregion.
                    if sub_index == 0:
                        exp.image.array[npix // 2 :, 0 : npix // 2] = binned_image_arr  # noqa: E203
                    elif sub_index == 1:
                        exp.image.array[0 : npix // 2, 0 : npix // 2] = binned_image_arr  # noqa: E203
                    elif sub_index == 2:
                        exp.image.array[npix // 2 :, npix // 2 :] = binned_image_arr  # noqa: E203
                    elif sub_index == 3:
                        exp.image.array[0 : npix // 2, npix // 2 :] = binned_image_arr  # noqa: E203
                    else:
                        # This should be impossible.
                        raise ValueError("Illegal pixel sub index")

                    # Erase the previous exposure.
                    if order < max_order:
                        exposures[(band, order)].image.array[:, :] = np.nan

        if not self.config.parallel_highest_order:
            # Write the properties files and MOCs.
            if not do_color:
                for band in bands:
                    band_pixels = np.array(
                        [pixel for pixel, band_ in hips_exposure_handle_dict.keys() if band_ == band]
                    )
                    band_pixels = np.sort(band_pixels)

                    self._write_properties_and_moc(
                        hips_base_path.join(f"band_{band}", forceDirectory=True),
                        max_order,
                        band_pixels,
                        exp0,
                        shift_order,
                        band,
                        False,
                    )
                    self._write_allsky_file(
                        hips_base_path.join(f"band_{band}", forceDirectory=True),
                        min_order,
                    )
            else:
                self._write_properties_and_moc(
                    hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                    max_order,
                    pixels[:-1],
                    exp0,
                    shift_order,
                    colorstr,
                    True,
                )
                self._write_allsky_file(
                    hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                    min_order,
                )

1215 

1216 def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9): 

1217 """Write a HiPS image. 

1218 

1219 Parameters 

1220 ---------- 

1221 hips_base_path : `lsst.resources.ResourcePath` 

1222 Resource path to the base of the HiPS directory tree. 

1223 order : `int` 

1224 HEALPix order of the HiPS image to write. 

1225 pixel : `int` 

1226 HEALPix pixel of the HiPS image. 

1227 image : `lsst.afw.image.Image` 

1228 Image to write. 

1229 png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping` 

1230 Mapping to convert image to scaled png. 

1231 shift_order : `int`, optional 

1232 HPX shift_order. 

1233 """ 

1234 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1235 # outside of the butler. This task has been given (temporary) 

1236 # Special Dispensation because of the nature of HiPS outputs until 

1237 # a more controlled solution can be found. 

1238 

1239 dir_number = self._get_dir_number(pixel) 

1240 hips_dir = hips_base_path.join(f"Norder{order}", forceDirectory=True).join( 

1241 f"Dir{dir_number}", forceDirectory=True 

1242 ) 

1243 

1244 wcs = makeHpxWcs(order, pixel, shift_order=shift_order) 

1245 

1246 uri = hips_dir.join(f"Npix{pixel}.fits") 

1247 

1248 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1249 image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata()) 

1250 

1251 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1252 

1253 # And make a grayscale png as well 

1254 

1255 with np.errstate(invalid="ignore"): 

1256 vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8) 

1257 

1258 vals[~np.isfinite(image.array) | (image.array < 0)] = 0 

1259 im = Image.fromarray(vals[::-1, :], "L") 

1260 

1261 uri = hips_dir.join(f"Npix{pixel}.{self.config.file_extension}") 

1262 

1263 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1264 im.save(temporary_uri.ospath) 

1265 

1266 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1267 

1268 def _write_hips_color_png(self, hips_base_path, order, pixel, lupton_args, lsstRGB_args): 

1269 """Write a color png HiPS image. 

1270 

1271 Parameters 

1272 ---------- 

1273 hips_base_path : `lsst.resources.ResourcePath` 

1274 Resource path to the base of the HiPS directory tree. 

1275 order : `int` 

1276 HEALPix order of the HiPS image to write. 

1277 pixel : `int` 

1278 HEALPix pixel of the HiPS image. 

1279 lupton_args : `dict` 

1280 A mapping of parameters used when building lupton color images. 

1281 lsstRGB_args : `dict` 

1282 A mapping of parameters used when building lsstRGB color images. 

1283 """ 

1284 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1285 # outside of the butler. This task has been given (temporary) 

1286 # Special Dispensation because of the nature of HiPS outputs until 

1287 # a more controlled solution can be found. 

1288 

1289 dir_number = self._get_dir_number(pixel) 

1290 hips_dir = hips_base_path.join(f"Norder{order}", forceDirectory=True).join( 

1291 f"Dir{dir_number}", forceDirectory=True 

1292 ) 

1293 match self.config.rgbStyle: 

1294 case "lupton": 

1295 # We need to convert nans to the minimum values in the mapping. 

1296 png_mapping = lupton_args["png_mapping"] 

1297 arr_red = lupton_args["image_red"].array.copy() 

1298 arr_red[np.isnan(arr_red)] = png_mapping.minimum[0] 

1299 arr_green = lupton_args["image_green"].array.copy() 

1300 arr_green[np.isnan(arr_green)] = png_mapping.minimum[1] 

1301 arr_blue = lupton_args["image_blue"].array.copy() 

1302 arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2] 

1303 

1304 image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue) 

1305 case "lsstRGB": 

1306 image_array = self.rgbGenerator.run(lsstRGB_args["band_mapping"]).outputRGB 

1307 

1308 im = Image.fromarray(image_array[::-1, :, :], mode="RGB") 

1309 

1310 uri = hips_dir.join(f"Npix{pixel}.{self.config.file_extension}") 

1311 

1312 extra_args = {} 

1313 if self.config.file_extension == "webp": 

1314 extra_args["lossless"] = True 

1315 extra_args["quality"] = 80 

1316 

1317 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1318 im.save(temporary_uri.ospath, **extra_args) 

1319 

1320 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1321 

1322 def _write_properties_and_moc( 

1323 self, hips_base_path, max_order, pixels, exposure, shift_order, band, multiband 

1324 ): 

1325 """Write HiPS properties file and MOC. 

1326 

1327 Parameters 

1328 ---------- 

1329 hips_base_path : : `lsst.resources.ResourcePath` 

1330 Resource path to the base of the HiPS directory tree. 

1331 max_order : `int` 

1332 Maximum HEALPix order. 

1333 pixels : `np.ndarray` (N,) 

1334 Array of pixels used. 

1335 exposure : `lsst.afw.image.Exposure` 

1336 Sample HPX exposure used for generating HiPS tiles. 

1337 shift_order : `int` 

1338 HPX shift order. 

1339 band : `str` 

1340 Band (or color). 

1341 multiband : `bool` 

1342 Is band multiband / color? 

1343 """ 

1344 area = hpg.nside_to_pixel_area(2**max_order, degrees=True) * len(pixels) 

1345 

1346 initial_ra = self.config.properties.initial_ra 

1347 initial_dec = self.config.properties.initial_dec 

1348 initial_fov = self.config.properties.initial_fov 

1349 

1350 if initial_ra is None or initial_dec is None or initial_fov is None: 

1351 # We want to point to an arbitrary pixel in the footprint. 

1352 # Just take the median pixel value for simplicity. 

1353 temp_pixels = pixels.copy() 

1354 if temp_pixels.size % 2 == 0: 

1355 temp_pixels = np.append(temp_pixels, [temp_pixels[0]]) 

1356 medpix = int(np.median(temp_pixels)) 

1357 _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix) 

1358 _initial_fov = hpg.nside_to_resolution(2**max_order, units="arcminutes") / 60.0 

1359 

1360 if initial_ra is None or initial_dec is None: 

1361 initial_ra = _initial_ra 

1362 initial_dec = _initial_dec 

1363 if initial_fov is None: 

1364 initial_fov = _initial_fov 

1365 

1366 self._write_hips_properties_file( 

1367 hips_base_path, 

1368 self.config.properties, 

1369 band, 

1370 multiband, 

1371 exposure, 

1372 max_order, 

1373 shift_order, 

1374 area, 

1375 initial_ra, 

1376 initial_dec, 

1377 initial_fov, 

1378 ) 

1379 

1380 # Write the MOC coverage 

1381 self._write_hips_moc_file( 

1382 hips_base_path, 

1383 max_order, 

1384 pixels, 

1385 ) 

1386 

1387 def _write_hips_properties_file( 

1388 self, 

1389 hips_base_path, 

1390 properties_config, 

1391 band, 

1392 multiband, 

1393 exposure, 

1394 max_order, 

1395 shift_order, 

1396 area, 

1397 initial_ra, 

1398 initial_dec, 

1399 initial_fov, 

1400 ): 

1401 """Write HiPS properties file. 

1402 

1403 Parameters 

1404 ---------- 

1405 hips_base_path : `lsst.resources.ResourcePath` 

1406 ResourcePath at top of HiPS tree. File will be written 

1407 to this path as ``properties``. 

1408 properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig` 

1409 Configuration for properties values. 

1410 band : `str` 

1411 Name of band(s) for HiPS tree. 

1412 multiband : `bool` 

1413 Is multiband / color? 

1414 exposure : `lsst.afw.image.Exposure` 

1415 Sample HPX exposure used for generating HiPS tiles. 

1416 max_order : `int` 

1417 Maximum HEALPix order. 

1418 shift_order : `int` 

1419 HPX shift order. 

1420 area : `float` 

1421 Coverage area in square degrees. 

1422 initial_ra : `float` 

1423 Initial HiPS RA position (degrees). 

1424 initial_dec : `float` 

1425 Initial HiPS Dec position (degrees). 

1426 initial_fov : `float` 

1427 Initial HiPS display size (degrees). 

1428 """ 

1429 

1430 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1431 # outside of the butler. This task has been given (temporary) 

1432 # Special Dispensation because of the nature of HiPS outputs until 

1433 # a more controlled solution can be found. 

1434 def _write_property(fh, name, value): 

1435 """Write a property name/value to a file handle. 

1436 

1437 Parameters 

1438 ---------- 

1439 fh : file handle (blah) 

1440 Open for writing. 

1441 name : `str` 

1442 Name of property 

1443 value : `str` 

1444 Value of property 

1445 """ 

1446 # This ensures that the name has no spaces or space-like characters, 

1447 # per the HiPS standard. 

1448 if re.search(r"\s", name): 

1449 raise ValueError(f"``{name}`` cannot contain any space characters.") 

1450 if "=" in name: 

1451 raise ValueError(f"``{name}`` cannot contain an ``=``") 

1452 

1453 fh.write(f"{name:25}= {value}\n") 

1454 

1455 if exposure.image.array.dtype == np.dtype("float32"): 

1456 bitpix = -32 

1457 elif exposure.image.array.dtype == np.dtype("float64"): 

1458 bitpix = -64 

1459 elif exposure.image.array.dtype == np.dtype("int32"): 

1460 bitpix = 32 

1461 

1462 date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z" 

1463 pixel_scale = hpg.nside_to_resolution(2 ** (max_order + shift_order), units="degrees") 

1464 

1465 uri = hips_base_path.join("properties") 

1466 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1467 with open(temporary_uri.ospath, "w") as fh: 

1468 _write_property( 

1469 fh, 

1470 "creator_did", 

1471 properties_config.creator_did_template.format(band=band), 

1472 ) 

1473 if properties_config.obs_collection is not None: 

1474 _write_property(fh, "obs_collection", properties_config.obs_collection) 

1475 _write_property( 

1476 fh, 

1477 "obs_title", 

1478 properties_config.obs_title_template.format(band=band), 

1479 ) 

1480 if properties_config.obs_description_template is not None: 

1481 _write_property( 

1482 fh, 

1483 "obs_description", 

1484 properties_config.obs_description_template.format(band=band), 

1485 ) 

1486 if len(properties_config.prov_progenitor) > 0: 

1487 for prov_progenitor in properties_config.prov_progenitor: 

1488 _write_property(fh, "prov_progenitor", prov_progenitor) 

1489 if properties_config.obs_ack is not None: 

1490 _write_property(fh, "obs_ack", properties_config.obs_ack) 

1491 _write_property(fh, "obs_regime", "Optical") 

1492 _write_property(fh, "data_pixel_bitpix", str(bitpix)) 

1493 _write_property(fh, "dataproduct_type", "image") 

1494 _write_property(fh, "moc_sky_fraction", str(area / 41253.0)) 

1495 _write_property(fh, "data_ucd", "phot.flux") 

1496 _write_property(fh, "hips_creation_date", date_iso8601) 

1497 _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask") 

1498 _write_property(fh, "hips_creator", "Vera C. Rubin Observatory") 

1499 _write_property(fh, "hips_version", "1.4") 

1500 _write_property(fh, "hips_release_date", date_iso8601) 

1501 _write_property(fh, "hips_frame", "equatorial") 

1502 _write_property(fh, "hips_order", str(max_order)) 

1503 _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth())) 

1504 _write_property(fh, "hips_status", "private master clonableOnce") 

1505 if multiband: 

1506 _write_property(fh, "hips_tile_format", self.config.file_extension) 

1507 _write_property(fh, "dataproduct_subtype", "color") 

1508 else: 

1509 _write_property(fh, "hips_tile_format", f"{self.config.file_extension} fits") 

1510 _write_property(fh, "hips_pixel_bitpix", str(bitpix)) 

1511 _write_property(fh, "hips_pixel_scale", str(pixel_scale)) 

1512 _write_property(fh, "hips_initial_ra", str(initial_ra)) 

1513 _write_property(fh, "hips_initial_dec", str(initial_dec)) 

1514 _write_property(fh, "hips_initial_fov", str(initial_fov)) 

1515 if multiband: 

1516 if self.config.blue_channel_band in properties_config.spectral_ranges: 

1517 em_min = ( 

1518 properties_config.spectral_ranges[self.config.blue_channel_band].lambda_min / 1e9 

1519 ) 

1520 else: 

1521 self.log.warning("blue band %s not in self.config.spectral_ranges.", band) 

1522 em_min = 3e-7 

1523 if self.config.red_channel_band in properties_config.spectral_ranges: 

1524 em_max = ( 

1525 properties_config.spectral_ranges[self.config.red_channel_band].lambda_max / 1e9 

1526 ) 

1527 else: 

1528 self.log.warning("red band %s not in self.config.spectral_ranges.", band) 

1529 em_max = 1e-6 

1530 else: 

1531 if band in properties_config.spectral_ranges: 

1532 em_min = properties_config.spectral_ranges[band].lambda_min / 1e9 

1533 em_max = properties_config.spectral_ranges[band].lambda_max / 1e9 

1534 else: 

1535 self.log.warning("band %s not in self.config.spectral_ranges.", band) 

1536 em_min = 3e-7 

1537 em_max = 1e-6 

1538 _write_property(fh, "em_min", str(em_min)) 

1539 _write_property(fh, "em_max", str(em_max)) 

1540 if properties_config.t_min is not None: 

1541 _write_property(fh, "t_min", properties_config.t_min) 

1542 if properties_config.t_max is not None: 

1543 _write_property(fh, "t_max", properties_config.t_max) 

1544 

1545 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1546 

    def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
        """Write HiPS MOC file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath to top of HiPS tree. File will be written as
            to this path as ``Moc.fits``.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray`
            Array of pixels covered.
        min_uniq_order : `int`, optional
            Minimum HEALPix order for looking for fully covered pixels.
        """
        # WARNING: In general PipelineTasks are not allowed to do any outputs
        # outside of the butler. This task has been given (temporary)
        # Special Dispensation because of the nature of HiPS outputs until
        # a more controlled solution can be found.

        # Make the initial list of UNIQ pixels
        # (UNIQ encoding: 4 * 4**order + pixel, per the NUNIQ scheme used in
        # the FITS header below).
        uniq = 4 * (4**max_order) + pixels

        # Make a healsparse map which provides easy degrade/comparisons.
        hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
        hspmap[pixels] = 1.0

        # Loop over orders, degrade each time, and look for pixels with full coverage.
        for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
            hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
            pix_shift = np.right_shift(pixels, 2 * (max_order - uniq_order))
            # Check if any of the pixels at uniq_order have full coverage.
            # A fully covered parent sums to 4**(order difference) children.
            (covered,) = np.isclose(hspmap[pix_shift], 4 ** (max_order - uniq_order)).nonzero()
            if covered.size == 0:
                # No pixels at uniq_order are fully covered, we're done.
                break
            # Replace the UNIQ pixels that are fully covered.
            uniq[covered] = 4 * (4**uniq_order) + pix_shift[covered]

        # Remove duplicate pixels.
        uniq = np.unique(uniq)

        # Output to fits.
        tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
        tbl["UNIQ"] = uniq

        # Recover each entry's order from its UNIQ value to find the maximum.
        order = np.log2(tbl["UNIQ"] // 4).astype(np.int32) // 2
        moc_order = np.max(order)

        hdu = fits.BinTableHDU(tbl)
        hdu.header["PIXTYPE"] = "HEALPIX"
        hdu.header["ORDERING"] = "NUNIQ"
        hdu.header["COORDSYS"] = "C"
        hdu.header["MOCORDER"] = moc_order
        hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"

        uri = hips_base_path.join("Moc.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            hdu.writeto(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

1609 

1610 def _write_allsky_file(self, hips_base_path, allsky_order): 

1611 """Write an Allsky.png file. 

1612 

1613 Parameters 

1614 ---------- 

1615 hips_base_path : `lsst.resources.ResourcePath` 

1616 Resource path to the base of the HiPS directory tree. 

1617 allsky_order : `int` 

1618 HEALPix order of the minimum order to make allsky file. 

1619 """ 

1620 tile_size = self.config.allsky_tilesize 

1621 

1622 # The Allsky file format is described in 

1623 # https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf 

1624 # From S4.3.2: 

1625 # The Allsky file is built as an array of tiles, stored side by side in 

1626 # the left-to-right order. The width of this array must be the square 

1627 # root of the number of the tiles of the order. For instance, the width 

1628 # of this array at order 3 is 27 ( (int)sqrt(768) ). To avoid having a 

1629 # too large Allsky file, the resolution of each tile may be reduced but 

1630 # must stay a power of two (typically 64x64 pixels rather than 512x512). 

1631 

1632 n_tiles = hpg.nside_to_npixel(hpg.order_to_nside(allsky_order)) 

1633 n_tiles_wide = int(np.floor(np.sqrt(n_tiles))) 

1634 n_tiles_high = int(np.ceil(n_tiles / n_tiles_wide)) 

1635 

1636 allsky_image = None 

1637 

1638 allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True) 

1639 if self.config.file_extension == "png": 

1640 pixel_regex = re.compile(r"Npix([0-9]+)\.png$") 

1641 elif self.config.file_extension == "webp": 

1642 pixel_regex = re.compile(r"Npix([0-9]+)\.webp$") 

1643 else: 

1644 raise RuntimeError("Unknown file extension") 

1645 

1646 png_uris = list( 

1647 ResourcePath.findFileResources( 

1648 candidates=[allsky_order_uri], 

1649 file_filter=pixel_regex, 

1650 ) 

1651 ) 

1652 

1653 for png_uri in png_uris: 

1654 matches = re.match(pixel_regex, png_uri.basename()) 

1655 pix_num = int(matches.group(1)) 

1656 tile_image = Image.open(io.BytesIO(png_uri.read())) 

1657 row = math.floor(pix_num // n_tiles_wide) 

1658 column = pix_num % n_tiles_wide 

1659 box = (column * tile_size, row * tile_size, (column + 1) * tile_size, (row + 1) * tile_size) 

1660 tile_image_shrunk = tile_image.resize((tile_size, tile_size)) 

1661 

1662 if allsky_image is None: 

1663 allsky_image = Image.new( 

1664 tile_image.mode, 

1665 (n_tiles_wide * tile_size, n_tiles_high * tile_size), 

1666 ) 

1667 allsky_image.paste(tile_image_shrunk, box) 

1668 

1669 uri = allsky_order_uri.join(f"Allsky.{self.config.file_extension}") 

1670 

1671 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1672 allsky_image.save(temporary_uri.ospath) 

1673 

1674 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1675 

1676 def _get_dir_number(self, pixel): 

1677 """Compute the directory number from a pixel. 

1678 

1679 Parameters 

1680 ---------- 

1681 pixel : `int` 

1682 HEALPix pixel number. 

1683 

1684 Returns 

1685 ------- 

1686 dir_number : `int` 

1687 HiPS directory number. 

1688 """ 

1689 return (pixel // 10000) * 10000 

1690 

1691 

class GenerateColorHipsConnections(
    pipeBase.PipelineTaskConnections, dimensions=("instrument",), defaultTemplates={"coaddName": "deep"}
):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )

    def __init__(self, *, config):
        super().__init__(config=config)
        if config.parallel_highest_order:
            # Locate the healpix dimension of the input connection and fold
            # it into the task dimensions so quanta can be split over
            # healpix pixels when running the highest order in parallel.
            for candidate in self.hips_exposure_handles.dimensions:
                if "healpix" in candidate:
                    hdim = candidate
            self.dimensions = {*self.dimensions, hdim}

1714 

1715 

class GenerateColorHipsConfig(GenerateHipsConfig, pipelineConnections=GenerateColorHipsConnections):
    """Configuration parameters for GenerateColorHipsTask."""

    blue_channel_band = pexConfig.Field(
        dtype=str,
        default="g",
        doc="Band to use for blue channel of color pngs in lupton color mapping.",
    )
    green_channel_band = pexConfig.Field(
        dtype=str,
        default="r",
        doc="Band to use for green channel of color pngs in lupton color mapping.",
    )
    red_channel_band = pexConfig.Field(
        dtype=str,
        default="i",
        doc="Band to use for red channel of color pngs in lupton color mapping.",
    )
    png_color_asinh_minimum = pexConfig.Field(
        dtype=float,
        default=0.0,
        doc="AsinhMapping intensity to be mapped to black for color png scaling in lupton color mapping.",
    )
    png_color_asinh_stretch = pexConfig.Field(
        dtype=float,
        default=5.0,
        doc="AsinhMapping linear stretch for color png scaling in lupton color mapping.",
    )
    png_color_asinh_softening = pexConfig.Field(
        dtype=float,
        default=8.0,
        doc="AsinhMapping softening parameter (Q) for color png scaling in lupton color mapping.",
    )
    rgbGenerator = pexConfig.ConfigurableField[PrettyPictureTask](
        target=PrettyPictureTask,
        doc="The task to use to generate an RGB image",
    )
    rgbStyle = pexConfig.ChoiceField[str](
        default="lupton",
        doc="The rgb mapping style, must be one of lsstRGB or lupton",
        allowed={
            "lupton": "Use the lupton algorithm for RGB images",
            "lsstRGB": "Use new style lsstRGB color algorithm for RGB images",
        },
    )

    def setDefaults(self):
        super().setDefaults()
        # Tune the pretty-picture generator defaults for HiPS tile production.
        self.rgbGenerator: PrettyPictureConfig
        self.rgbGenerator.doPSFDeconcovlve = False
        self.rgbGenerator.exposureBrackets = None
        self.rgbGenerator.localContrastConfig.doLocalContrast = False
        self.rgbGenerator.imageRemappingConfig.absMax = 550
        # Luminance stretch parameters.
        self.rgbGenerator.luminanceConfig.Q = 0.7
        self.rgbGenerator.luminanceConfig.stretch = 250
        self.rgbGenerator.luminanceConfig.max = 100
        self.rgbGenerator.luminanceConfig.highlight = 0.905882
        self.rgbGenerator.luminanceConfig.shadow = 0.12
        self.rgbGenerator.luminanceConfig.midtone = 0.25
        # Chromaticity and white-point handling.
        self.rgbGenerator.colorConfig.maxChroma = 80
        self.rgbGenerator.colorConfig.saturation = 0.6
        self.rgbGenerator.cieWhitePoint = (0.28, 0.28)

1779 

1780 

1781class GenerateColorHipsTask(GenerateHipsTask): 

1782 """Task for making a HiPS tree with color pngs.""" 

1783 

1784 ConfigClass = GenerateColorHipsConfig 

1785 _DefaultName = "generateColorHips" 

1786 color_task = True 

1787 

    def __init__(self, *, config=None, log=None, initInputs=None, **kwargs):
        """Construct the task and create the ``rgbGenerator`` subtask."""
        # NOTE(review): ``initInputs`` is accepted for PipelineTask interface
        # compatibility but is not forwarded to the base class — confirm this
        # is intentional.
        super().__init__(config=config, log=log, **kwargs)
        # Subtask configured by ``config.rgbGenerator``; used to build the
        # color images.
        self.makeSubtask("rgbGenerator")

1791 

1792 def _check_data_bands(self, data_bands): 

1793 """Check the data for configured bands. 

1794 

1795 Warn if any color bands are missing data. 

1796 

1797 Parameters 

1798 ---------- 

1799 data_bands : `set` [`str`] 

1800 Bands from the input data. 

1801 

1802 Returns 

1803 ------- 

1804 bands : `list` [`str`] 

1805 List of bands in bgr color order. 

1806 """ 

1807 if len(data_bands) == 0: 

1808 raise RuntimeError("GenerateColorHipsTask must have data from at least one band.") 

1809 

1810 match self.config.rgbStyle: 

1811 case "lupton": 

1812 if self.config.blue_channel_band not in data_bands: 

1813 self.log.warning( 

1814 "Color png blue_channel_band %s not in dataset.", self.config.blue_channel_band 

1815 ) 

1816 if self.config.green_channel_band not in data_bands: 

1817 self.log.warning( 

1818 "Color png green_channel_band %s not in dataset.", self.config.green_channel_band 

1819 ) 

1820 if self.config.red_channel_band not in data_bands: 

1821 self.log.warning( 

1822 "Color png red_channel_band %s not in dataset.", self.config.red_channel_band 

1823 ) 

1824 

1825 bands = [ 

1826 self.config.blue_channel_band, 

1827 self.config.green_channel_band, 

1828 self.config.red_channel_band, 

1829 ] 

1830 return bands 

1831 case "lsstRGB": 

1832 # The pretty picture maker task supports compositing more than 3 bands 

1833 # into an rgb image. Find all the bands specified and list them in 

1834 # bgr order according to the hue specified for each astrophysical band. 

1835 band_names = [] 

1836 band_values = [] 

1837 for band_name, config in self.config.rgbGenerator.channelConfig.items(): 

1838 band_names.append(band_name) 

1839 band_values.append((config.r, config.g, config.b)) 

1840 # convert to a space where it is easy to calcualte the hue 

1841 labs = colour.XYZ_to_Oklab(colour.RGB_to_XYZ(band_values, colourspace="CIE RGB")) 

1842 hues = np.arctan2(labs[:, 2], labs[:, 1]) 

1843 # transform negative angles to an offset from 2pi 

1844 hues[hues < 0] = 2 * np.pi + hues[hues < 0] 

1845 # reversed here to transform from rgb to bgr order 

1846 order = np.argsort(hues)[::-1] 

1847 config_bands = [band_names[index] for index in order] 

1848 

1849 for band_name in config_bands: 

1850 if band_name not in data_bands: 

1851 self.log.warning( 

1852 f"Data band {band_name} is specified in the RGB config but there is no input " 

1853 "data for that band." 

1854 ) 

1855 

1856 return config_bands