Coverage for python/lsst/pipe/tasks/hips.py: 14%

619 statements  

« prev     ^ index     » next       coverage.py v7.5.1, created at 2024-05-15 02:18 -0700

1# This file is part of pipe_tasks. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <https://www.gnu.org/licenses/>. 

21 

22"""Tasks for making and manipulating HIPS images.""" 

23 

24__all__ = ["HighResolutionHipsTask", "HighResolutionHipsConfig", "HighResolutionHipsConnections", 

25 "HighResolutionHipsQuantumGraphBuilder", 

26 "GenerateHipsTask", "GenerateHipsConfig", "GenerateColorHipsTask", "GenerateColorHipsConfig"] 

27 

28from collections import defaultdict 

29import numpy as np 

30import argparse 

31import io 

32import sys 

33import re 

34import warnings 

35import math 

36from datetime import datetime 

37import hpgeom as hpg 

38import healsparse as hsp 

39from astropy.io import fits 

40from astropy.visualization.lupton_rgb import AsinhMapping 

41from PIL import Image 

42 

43from lsst.sphgeom import RangeSet, HealpixPixelization 

44from lsst.utils.timer import timeMethod 

45from lsst.daf.butler import Butler 

46import lsst.pex.config as pexConfig 

47import lsst.pipe.base as pipeBase 

48from lsst.pipe.base.quantum_graph_builder import QuantumGraphBuilder 

49from lsst.pipe.base.quantum_graph_skeleton import QuantumGraphSkeleton, DatasetKey 

50import lsst.afw.geom as afwGeom 

51import lsst.afw.math as afwMath 

52import lsst.afw.image as afwImage 

53import lsst.geom as geom 

54from lsst.afw.geom import makeHpxWcs 

55from lsst.resources import ResourcePath 

56 

57from .healSparseMapping import _is_power_of_two 

58 

59 

class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
                                    dimensions=("healpix9", "band"),
                                    defaultTemplates={"coaddName": "deep"}):
    """Connections for HighResolutionHipsTask.

    On construction, checks that exactly one healpix dimension labels the
    quantum and exactly one labels the output exposures, and that the output
    healpix order equals ``config.hips_order``.
    """
    coadd_exposure_handles = pipeBase.connectionTypes.Input(
        doc="Coadded exposures to convert to HIPS format.",
        name="{coaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    hips_exposures = pipeBase.connectionTypes.Output(
        doc="HiPS-compatible HPX image.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        # Exactly one healpix dimension must label the quantum, and its
        # order may not exceed the output HiPS order.
        quantum_orders = [int(dim.split("healpix")[1])
                          for dim in self.dimensions if "healpix" in dim]
        if len(quantum_orders) > 1:
            raise ValueError("Must not specify more than one quantum healpix dimension.")
        if not quantum_orders:
            raise ValueError("Must specify a healpix dimension in quantum dimensions.")

        if quantum_orders[0] > config.hips_order:
            raise ValueError("Quantum healpix dimension order must not be greater than hips_order")

        # The output exposures must carry exactly one healpix dimension,
        # and its order must equal config.hips_order.
        output_orders = [int(dim.split("healpix")[1])
                         for dim in self.hips_exposures.dimensions if "healpix" in dim]
        if len(output_orders) > 1:
            raise ValueError("Must not specify more than one healpix dimension.")
        if not output_orders:
            raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")

        if output_orders[0] != config.hips_order:
            raise ValueError("healpix dimension order must match config.hips_order.")

105 

106 

class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
                               pipelineConnections=HighResolutionHipsConnections):
    """Configuration parameters for HighResolutionHipsTask.

    Notes
    -----
    A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
    2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
    cell, with 2**shift_order x 2**shift_order sub-pixels on a side, which
    defines the target resolution of the HiPS image. The IVOA recommends
    shift_order=9, for 2**9=512 pixels on a side.

    Table 5 from
    https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
    shows the relationship between hips_order, number of tiles (full
    sky coverage), cell size, and sub-pixel size/image resolution (with
    the default shift_order=9):
    +------------+-----------------+--------------+------------------+
    | hips_order | Number of Tiles | Cell Size    | Image Resolution |
    +============+=================+==============+==================+
    | 0          | 12              | 58.63 deg    | 6.871 arcmin     |
    | 1          | 48              | 29.32 deg    | 3.435 arcmin     |
    | 2          | 192             | 14.66 deg    | 1.718 arcmin     |
    | 3          | 768             | 7.329 deg    | 51.53 arcsec     |
    | 4          | 3072            | 3.665 deg    | 25.77 arcsec     |
    | 5          | 12288           | 1.832 deg    | 12.88 arcsec     |
    | 6          | 49152           | 54.97 arcmin | 6.442 arcsec     |
    | 7          | 196608          | 27.48 arcmin | 3.221 arcsec     |
    | 8          | 786432          | 13.74 arcmin | 1.61 arcsec      |
    | 9          | 3145728         | 6.871 arcmin | 805.2mas         |
    | 10         | 12582912        | 3.435 arcmin | 402.6mas         |
    | 11         | 50331648        | 1.718 arcmin | 201.3mas         |
    | 12         | 201326592       | 51.53 arcsec | 100.6mas         |
    | 13         | 805306368       | 25.77 arcsec | 50.32mas         |
    +------------+-----------------+--------------+------------------+
    """
    # Order of the output HEALPix grid (nside = 2**hips_order); must match
    # the healpix dimension declared on the output connection.
    hips_order = pexConfig.Field(
        doc="HIPS image order.",
        dtype=int,
        default=11,
    )
    # Each output tile is 2**shift_order pixels on a side; 9 (512x512) is
    # the IVOA-recommended value.
    shift_order = pexConfig.Field(
        doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
        dtype=int,
        default=9,
    )
    warp = pexConfig.ConfigField(
        dtype=afwMath.Warper.ConfigClass,
        doc="Warper configuration",
    )

    def setDefaults(self):
        # Default to a lanczos5 kernel for resampling onto the HPX grid.
        self.warp.warpingKernelName = "lanczos5"

160 

161 

class HipsTaskNameDescriptor:
    """Descriptor used create a DefaultName that matches the order of
    the defined dimensions in the connections class.

    The healpix order is parsed lazily from the task's connections class
    on first attribute access and cached for subsequent lookups.

    Parameters
    ----------
    prefix : `str`
        The prefix of the Default name, to which the order will be
        appended.
    """
    def __init__(self, prefix):
        # Template whose placeholder is filled with the healpix order.
        self._defaultName = f"{prefix}{{}}"
        self._order = None

    def __get__(self, obj, klass=None):
        if klass is None:
            raise RuntimeError(
                "HipsTaskDescriptor was used in an unexpected context"
            )
        if self._order is None:
            # Scan the connections class dimensions for the single
            # "healpix<N>" entry and cache its order.
            matches = (
                re.match(r"^healpix(\d*)$", dim)
                for dim in klass.ConfigClass.ConnectionsClass.dimensions
            )
            found = next((m for m in matches if m is not None), None)
            if found is None:
                raise RuntimeError(
                    "Could not find healpix dimension in connections class"
                )
            self._order = int(found.group(1))
        return self._defaultName.format(self._order)

193 

194 

class HighResolutionHipsTask(pipeBase.PipelineTask):
    """Task for making high resolution HiPS images."""
    ConfigClass = HighResolutionHipsConfig
    # Descriptor appends the connections' healpix order to the name,
    # e.g. "highResolutionHips9" for the default connections.
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Warper used to resample input coadds onto the HPX projection.
        self.warper = afwMath.Warper.fromConfig(self.config.warp)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        # Butler-facing entry point: derive the pixel list from the
        # predicted outputs, run, and persist each produced exposure.
        inputs = butlerQC.get(inputRefs)

        healpix_dim = f"healpix{self.config.hips_order}"

        # Healpix indices to generate, taken from the output references.
        pixels = [hips_exposure.dataId[healpix_dim]
                  for hips_exposure in outputRefs.hips_exposures]

        outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])

        # Map pixel index -> output reference so each generated exposure is
        # put with the matching data ID.
        hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]:
                                  hips_exposure_ref for hips_exposure_ref in outputRefs.hips_exposures}
        for pixel, hips_exposure in outputs.hips_exposures.items():
            butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])

    def run(self, pixels, coadd_exposure_handles):
        """Run the HighResolutionHipsTask.

        Parameters
        ----------
        pixels : `Iterable` [ `int` ]
            Iterable of healpix pixels (nest ordering) to warp to.
        coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
            Handles for the coadd exposures.

        Returns
        -------
        outputs : `lsst.pipe.base.Struct`
            ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value)
        """
        self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)

        # All output tiles share the same bounding box:
        # 2**shift_order pixels on a side.
        npix = 2**self.config.shift_order
        bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0),
                              dimensions=geom.Extent2I(npix, npix))

        # For each healpix pixel we will create an empty exposure with the
        # correct HPX WCS. We furthermore create a dict to hold each of
        # the warps that will go into each HPX exposure.
        exp_hpx_dict = {}
        warp_dict = {}
        for pixel in pixels:
            wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
            exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
            exp_hpx_dict[pixel] = exp_hpx
            warp_dict[pixel] = []

        first_handle = True
        # Loop over input coadd exposures to minimize i/o (this speeds things
        # up by ~8x to batch together pixels that overlap a given coadd).
        for handle in coadd_exposure_handles:
            coadd_exp = handle.get()

            # For each pixel, warp the coadd to the HPX WCS for the pixel.
            for pixel in pixels:
                warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)

                # Fresh full-tile image, filled with NaN/NO_DATA; the warp
                # (which may cover only part of the tile) is assigned into it.
                exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
                exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

                if first_handle:
                    # Make sure the mask planes, filter, and photocalib of the output
                    # exposure match the (first) input exposure.
                    exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
                    exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
                    exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())

                if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
                    # There is no overlap, skip.
                    self.log.debug(
                        "No overlap between output HPX %d and input exposure %s",
                        pixel,
                        handle.dataId
                    )
                    continue

                exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
                warp_dict[pixel].append(exp.maskedImage)

            first_handle = False

        # Mean stack, NaN-safe, with errors propagated from input variance.
        stats_flags = afwMath.stringToStatisticsProperty("MEAN")
        stats_ctrl = afwMath.StatisticsControl()
        stats_ctrl.setNanSafe(True)
        stats_ctrl.setWeighted(True)
        stats_ctrl.setCalcErrorFromInputVariance(True)

        # Loop over pixels and combine the warps for each pixel.
        # The combination is done with a simple mean for pixels that
        # overlap in neighboring patches.
        for pixel in pixels:
            exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

            if not warp_dict[pixel]:
                # Nothing in this pixel
                self.log.debug("No data in HPX pixel %d", pixel)
                # Remove the pixel from the output, no need to persist an
                # empty exposure.
                exp_hpx_dict.pop(pixel)
                continue

            exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
                warp_dict[pixel],
                stats_flags,
                stats_ctrl,
                [1.0]*len(warp_dict[pixel]),
                clipped=0,
                maskMap=[]
            )

        return pipeBase.Struct(hips_exposures=exp_hpx_dict)

    @classmethod
    def build_quantum_graph_cli(cls, argv):
        """A command-line interface entry point to `build_quantum_graph`.
        This method provides the implementation for the
        ``build-high-resolution-hips-qg`` script.

        Parameters
        ----------
        argv : `Sequence` [ `str` ]
            Command-line arguments (e.g. ``sys.argv[1:]``).
        """
        parser = cls._make_cli_parser()

        args = parser.parse_args(argv)

        if args.subparser_name is None:
            parser.print_help()
            sys.exit(1)

        pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
        pipeline_graph = pipeline.to_graph()

        if len(pipeline_graph.tasks) != 1:
            raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")

        (task_node,) = pipeline_graph.tasks.values()

        butler = Butler(args.butler_config, collections=args.input)

        if args.subparser_name == "segment":
            # Do the segmentation: print the constraint-order healpix pixels
            # that overlap any tract with input data.
            hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
            dataset = task_node.inputs["coadd_exposure_handles"].dataset_type_name
            data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded())
            region_pixels = []
            for data_id in data_ids:
                region = data_id.region
                pixel_range = hpix_pixelization.envelope(region)
                for r in pixel_range.ranges():
                    region_pixels.extend(range(r[0], r[1]))
            indices = np.unique(region_pixels)

            print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
            for pixel in indices:
                print(pixel)

        elif args.subparser_name == "build":
            # Build the quantum graph.

            # Figure out collection names.
            if args.output_run is None:
                if args.output is None:
                    raise ValueError("At least one of --output or --output-run options is required.")
                args.output_run = "{}/{}".format(args.output, pipeBase.Instrument.makeCollectionTimestamp())

            build_ranges = RangeSet(sorted(args.pixels))

            # Metadata includes a subset of attributes defined in CmdLineFwk.
            metadata = {
                "input": args.input,
                "butler_argument": args.butler_config,
                "output": args.output,
                "output_run": args.output_run,
                "data_query": args.where,
                "time": f"{datetime.now()}",
            }

            builder = HighResolutionHipsQuantumGraphBuilder(
                pipeline_graph,
                butler,
                input_collections=args.input,
                output_run=args.output_run,
                constraint_order=args.hpix_build_order,
                constraint_ranges=build_ranges,
                where=args.where,
            )
            qg = builder.build(metadata, attach_datastore_records=True)
            qg.saveUri(args.save_qgraph)

    @classmethod
    def _make_cli_parser(cls):
        """Make the command-line parser.

        Returns
        -------
        parser : `argparse.ArgumentParser`
        """
        parser = argparse.ArgumentParser(
            description=(
                "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."
            ),
        )
        subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")

        parser_segment = subparsers.add_parser("segment",
                                               help="Determine survey segments for workflow.")
        parser_build = subparsers.add_parser("build",
                                             help="Build quantum graph for HighResolutionHipsTask")

        for sub in [parser_segment, parser_build]:
            # These arguments are in common.
            sub.add_argument(
                "-b",
                "--butler-config",
                type=str,
                help="Path to data repository or butler configuration.",
                required=True,
            )
            sub.add_argument(
                "-p",
                "--pipeline",
                type=str,
                help="Pipeline file, limited to one task.",
                required=True,
            )
            sub.add_argument(
                "-i",
                "--input",
                type=str,
                nargs="+",
                help="Input collection(s) to search for coadd exposures.",
                required=True,
            )
            sub.add_argument(
                "-o",
                "--hpix_build_order",
                type=int,
                default=1,
                help="HEALPix order to segment sky for building quantum graph files.",
            )
            sub.add_argument(
                "-w",
                "--where",
                type=str,
                default=None,
                help="Data ID expression used when querying for input coadd datasets.",
            )

        parser_build.add_argument(
            "--output",
            type=str,
            help=(
                "Name of the output CHAINED collection. If this options is specified and "
                "--output-run is not, then a new RUN collection will be created by appending "
                "a timestamp to the value of this option."
            ),
            default=None,
            metavar="COLL",
        )
        parser_build.add_argument(
            "--output-run",
            type=str,
            help=(
                "Output RUN collection to write resulting images. If not provided "
                "then --output must be provided and a new RUN collection will be created "
                "by appending a timestamp to the value passed with --output."
            ),
            default=None,
            metavar="RUN",
        )
        parser_build.add_argument(
            "-q",
            "--save-qgraph",
            type=str,
            help="Output filename for QuantumGraph.",
            required=True,
        )
        parser_build.add_argument(
            "-P",
            "--pixels",
            type=int,
            nargs="+",
            help="Pixels at --hpix_build_order to generate quantum graph.",
            required=True,
        )

        return parser

494 

495 

class HighResolutionHipsQuantumGraphBuilder(QuantumGraphBuilder):
    """A custom `lsst.pipe.base.QuantumGraphBuilder` for running
    `HighResolutionHipsTask` only.

    This is a workaround for incomplete butler query support for HEALPix
    dimensions.

    Parameters
    ----------
    pipeline_graph : `lsst.pipe.base.PipelineGraph`
        Pipeline graph with exactly one task, which must be a configuration
        of `HighResolutionHipsTask`.
    butler : `lsst.daf.butler.Butler`
        Client for the butler data repository.  May be read-only.
    input_collections : `str` or `Iterable` [ `str` ], optional
        Collection or collections to search for input datasets, in order.
        If not provided, ``butler.collections`` will be searched.
    output_run : `str`, optional
        Name of the output collection.  If not provided, ``butler.run`` will
        be used.
    constraint_order : `int`
        HEALPix order used to constrain which quanta are generated, via
        ``constraint_indices``.  This should be a coarser grid (smaller
        order) than the order used for the task's quantum and output data
        IDs, and ideally something between the spatial scale of a patch or
        the data repository's "common skypix" system (usually ``htm7``).
    constraint_ranges : `lsst.sphgeom.RangeSet`
        RangeSet that describes constraint pixels (HEALPix NEST, with order
        ``constraint_order``) to constrain generated quanta.
    where : `str`, optional
        A boolean `str` expression of the form accepted by
        `Registry.queryDatasets` to constrain input datasets.  This may
        contain a constraint on tracts, patches, or bands, but not HEALPix
        indices.  Constraints on tracts and patches should usually be
        unnecessary, however - existing coadds that overlap the given
        HEALpix indices will be selected without such a constraint, and
        providing one may reject some that should normally be included.
    """

    def __init__(
        self,
        pipeline_graph,
        butler,
        *,
        input_collections=None,
        output_run=None,
        constraint_order,
        constraint_ranges,
        where="",
    ):
        super().__init__(pipeline_graph, butler, input_collections=input_collections, output_run=output_run)
        self.constraint_order = constraint_order
        self.constraint_ranges = constraint_ranges
        self.where = where

    def process_subgraph(self, subgraph):
        # Docstring inherited.
        (task_node,) = subgraph.tasks.values()

        # Since we know this is the only task in the pipeline, we know there
        # is only one overall input and one regular output.
        (input_dataset_type_node,) = subgraph.inputs_of(task_node.label).values()
        assert input_dataset_type_node is not None, "PipelineGraph should be resolved by base class."
        (output_edge,) = task_node.outputs.values()
        output_dataset_type_node = subgraph.dataset_types[output_edge.parent_dataset_type_name]
        (hpx_output_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d]
            for d in output_dataset_type_node.dimensions.skypix.names
        )
        constraint_hpx_pixelization = (
            self.butler.dimensions.skypix_dimensions[f"healpix{self.constraint_order}"].pixelization
        )
        common_skypix_name = self.butler.dimensions.commonSkyPix.name
        common_skypix_pixelization = self.butler.dimensions.commonSkyPix.pixelization

        # We will need all the pixels at the quantum resolution as well.
        # '4' appears here frequently because it's the number of pixels at
        # level N+1 in a single pixel at level N.
        (hpx_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d] for d in task_node.dimensions.names if d != "band"
        )
        hpx_pixelization = hpx_dimension.pixelization
        if hpx_pixelization.level < self.constraint_order:
            # Fixed error message: the raise fires when the quantum order is
            # *less than* the constraint order, so it must be >=.
            raise ValueError(f"Quantum order {hpx_pixelization.level} must be >= {self.constraint_order}")
        hpx_ranges = self.constraint_ranges.scaled(4**(hpx_pixelization.level - self.constraint_order))

        # We can be generous in looking for pixels here, because we constrain
        # by actual patch regions below.
        common_skypix_ranges = RangeSet()
        for begin, end in self.constraint_ranges:
            for hpx_index in range(begin, end):
                constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
                common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)

        # To keep the query from getting out of hand (and breaking) we simplify
        # until we have fewer than 100 ranges which seems to work fine.
        for simp in range(1, 10):
            if len(common_skypix_ranges) < 100:
                break
            common_skypix_ranges.simplify(simp)

        # Use that RangeSet to assemble a WHERE constraint expression. This
        # could definitely get too big if the "constraint healpix" order is too
        # fine.
        where_terms = []
        bind = {}
        for n, (begin, end) in enumerate(common_skypix_ranges):
            stop = end - 1  # registry range syntax is inclusive
            if begin == stop:
                where_terms.append(f"{common_skypix_name} = cpx{n}")
                bind[f"cpx{n}"] = begin
            else:
                where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)")
                bind[f"cpx{n}a"] = begin
                bind[f"cpx{n}b"] = stop
        where = " OR ".join(where_terms)
        if self.where:
            where = f"({self.where}) AND ({where})"
        # Query for input datasets with this constraint, and ask for expanded
        # data IDs because we want regions. Immediately group this by patch so
        # we don't do later geometric stuff n_bands more times than we need to.
        input_refs = self.butler.registry.queryDatasets(
            input_dataset_type_node.dataset_type,
            where=where,
            findFirst=True,
            collections=self.input_collections,
            bind=bind
        ).expanded()
        inputs_by_patch = defaultdict(set)
        patch_dimensions = self.butler.dimensions.conform(["patch"])
        for input_ref in input_refs:
            dataset_key = DatasetKey(input_ref.datasetType.name, input_ref.dataId.required_values)
            self.existing_datasets.inputs[dataset_key] = input_ref
            inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(dataset_key)
        if not inputs_by_patch:
            message_body = "\n".join(input_refs.explain_no_results())
            raise RuntimeError(f"No inputs found:\n{message_body}")

        # Iterate over patches and compute the set of output healpix pixels
        # that overlap each one. Use that to associate inputs with output
        # pixels, but only for the output pixels we've already identified.
        inputs_by_hpx = defaultdict(set)
        for patch_data_id, input_keys_for_patch in inputs_by_patch.items():
            patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
            for begin, end in patch_hpx_ranges & hpx_ranges:
                for hpx_index in range(begin, end):
                    inputs_by_hpx[hpx_index].update(input_keys_for_patch)

        # Iterate over the dict we just created and create preliminary quanta.
        skeleton = QuantumGraphSkeleton([task_node.label])
        for hpx_index, input_keys_for_hpx_index in inputs_by_hpx.items():
            # Group inputs by band.
            input_keys_by_band = defaultdict(list)
            for input_key in input_keys_for_hpx_index:
                input_ref = self.existing_datasets.inputs[input_key]
                input_keys_by_band[input_ref.dataId["band"]].append(input_key)
            # Iterate over bands to make quanta.
            for band, input_keys_for_band in input_keys_by_band.items():
                data_id = self.butler.registry.expandDataId({hpx_dimension.name: hpx_index, "band": band})
                quantum_key = skeleton.add_quantum_node(task_node.label, data_id)
                # Add inputs to the skeleton.
                skeleton.add_input_edges(quantum_key, input_keys_for_band)
                # Add the regular outputs: all output-order pixels nested
                # inside this quantum-order pixel.
                hpx_pixel_ranges = RangeSet(hpx_index)
                hpx_output_ranges = hpx_pixel_ranges.scaled(
                    4**(task_node.config.hips_order - hpx_pixelization.level)
                )
                for begin, end in hpx_output_ranges:
                    for hpx_output_index in range(begin, end):
                        dataset_key = skeleton.add_dataset_node(
                            output_dataset_type_node.name,
                            self.butler.registry.expandDataId(
                                {hpx_output_dimension: hpx_output_index, "band": band}
                            ),
                        )
                        skeleton.add_output_edge(quantum_key, dataset_key)
                # Add auxiliary outputs (log, metadata).
                for write_edge in task_node.iter_all_outputs():
                    if write_edge.connection_name == output_edge.connection_name:
                        continue
                    dataset_key = skeleton.add_dataset_node(write_edge.parent_dataset_type_name, data_id)
                    skeleton.add_output_edge(quantum_key, dataset_key)
        return skeleton

679 

680 

class HipsPropertiesSpectralTerm(pexConfig.Config):
    """Configuration for one band's approximate spectral range (nm)."""
    lambda_min = pexConfig.Field(
        doc="Minimum wavelength (nm)",
        dtype=float,
    )
    lambda_max = pexConfig.Field(
        doc="Maximum wavelength (nm)",
        dtype=float,
    )

690 

691 

class HipsPropertiesConfig(pexConfig.Config):
    """Configuration parameters for writing a HiPS properties file."""
    creator_did_template = pexConfig.Field(
        doc=("Unique identifier of the HiPS - Format: IVOID. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
        optional=False,
    )
    obs_collection = pexConfig.Field(
        doc="Short name of original data set - Format: one word",
        dtype=str,
        optional=True,
    )
    obs_description_template = pexConfig.Field(
        doc=("Data set description - Format: free text, longer free text "
             "description of the dataset. Use ``{band}`` to substitute "
             "the band name."),
        dtype=str,
    )
    prov_progenitor = pexConfig.ListField(
        doc="Provenance of the original data - Format: free text",
        dtype=str,
        default=[],
    )
    obs_title_template = pexConfig.Field(
        doc=("Data set title format: free text, but should be short. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
        optional=False,
    )
    spectral_ranges = pexConfig.ConfigDictField(
        # Typo fixed: "lamba_max" -> "lambda_max".
        doc=("Mapping from band to lambda_min, lambda_max (nm). May be approximate."),
        keytype=str,
        itemtype=HipsPropertiesSpectralTerm,
        default={},
    )
    initial_ra = pexConfig.Field(
        doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_dec = pexConfig.Field(
        doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_fov = pexConfig.Field(
        doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
        dtype=float,
        optional=True,
    )
    obs_ack = pexConfig.Field(
        doc="Observation acknowledgements (free text).",
        dtype=str,
        optional=True,
    )
    t_min = pexConfig.Field(
        doc="Time (MJD) of earliest observation included in HiPS",
        dtype=float,
        optional=True,
    )
    t_max = pexConfig.Field(
        doc="Time (MJD) of latest observation included in HiPS",
        dtype=float,
        optional=True,
    )

    def validate(self):
        super().validate()

        # The HiPS standard requires obs_collection to be a single word.
        if self.obs_collection is not None:
            if re.search(r"\s", self.obs_collection):
                raise ValueError("obs_collection cannot contain any space characters.")

    def setDefaults(self):
        # Approximate per-band spectral ranges (nm).  Values here taken from
        # https://github.com/lsst-dm/dax_obscore/blob/44ac15029136e2ec15/configs/dp02.yaml#L46
        band_ranges = {
            "u": (330., 400.),
            "g": (402., 552.),
            "r": (552., 691.),
            "i": (691., 818.),
            "z": (818., 922.),
            "y": (970., 1060.),
        }
        for band, (lambda_min, lambda_max) in band_ranges.items():
            term = HipsPropertiesSpectralTerm()
            term.lambda_min = lambda_min
            term.lambda_max = lambda_max
            self.spectral_ranges[band] = term

793 

794 

class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "band"),
                              defaultTemplates={"coaddName": "deep"}):
    """Connections for GenerateHipsTask: deferred-load handles for all
    HPX exposures of a band."""
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )

806 

807 

class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=GenerateHipsConnections):
    """Configuration parameters for GenerateHipsTask."""
    # WARNING: In general PipelineTasks are not allowed to do any outputs
    # outside of the butler. This task has been given (temporary)
    # Special Dispensation because of the nature of HiPS outputs until
    # a more controlled solution can be found.
    hips_base_uri = pexConfig.Field(
        doc="URI to HiPS base for output.",
        dtype=str,
        optional=False,
    )
    # The tree is built from the input (maximum) order down to this order.
    min_order = pexConfig.Field(
        doc="Minimum healpix order for HiPS tree.",
        dtype=int,
        default=3,
    )
    properties = pexConfig.ConfigField(
        dtype=HipsPropertiesConfig,
        doc="Configuration for properties file.",
    )
    allsky_tilesize = pexConfig.Field(
        dtype=int,
        doc="Allsky tile size; must be power of 2. HiPS standard recommends 64x64 tiles.",
        default=64,
        check=_is_power_of_two,
    )
    # Asinh stretch parameters for the grayscale PNG previews.
    png_gray_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
        dtype=float,
        default=0.0,
    )
    png_gray_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for grayscale png scaling.",
        dtype=float,
        default=2.0,
    )
    png_gray_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
        dtype=float,
        default=8.0,
    )

850 

851 

class GenerateHipsTask(pipeBase.PipelineTask):
    """Task for making a HiPS tree with FITS and grayscale PNGs."""
    ConfigClass = GenerateHipsConfig
    _DefaultName = "generateHips"
    # Subclasses (see GenerateColorHipsTask) override this to True to
    # produce color PNG tiles instead of per-band grayscale tiles.
    color_task = False

857 

858 @timeMethod 

859 def runQuantum(self, butlerQC, inputRefs, outputRefs): 

860 inputs = butlerQC.get(inputRefs) 

861 

862 dims = inputRefs.hips_exposure_handles[0].dataId.dimensions.names 

863 order = None 

864 for dim in dims: 

865 if "healpix" in dim: 

866 order = int(dim.split("healpix")[1]) 

867 healpix_dim = dim 

868 break 

869 else: 

870 raise RuntimeError("Could not determine healpix order for input exposures.") 

871 

872 hips_exposure_handle_dict = { 

873 (hips_exposure_handle.dataId[healpix_dim], 

874 hips_exposure_handle.dataId["band"]): hips_exposure_handle 

875 for hips_exposure_handle in inputs["hips_exposure_handles"] 

876 } 

877 

878 data_bands = {hips_exposure_handle.dataId["band"] 

879 for hips_exposure_handle in inputs["hips_exposure_handles"]} 

880 bands = self._check_data_bands(data_bands) 

881 

882 self.run( 

883 bands=bands, 

884 max_order=order, 

885 hips_exposure_handle_dict=hips_exposure_handle_dict, 

886 do_color=self.color_task, 

887 ) 

888 

889 def _check_data_bands(self, data_bands): 

890 """Check that the data has only a single band. 

891 

892 Parameters 

893 ---------- 

894 data_bands : `set` [`str`] 

895 Bands from the input data. 

896 

897 Returns 

898 ------- 

899 bands : `list` [`str`] 

900 List of single band to process. 

901 

902 Raises 

903 ------ 

904 RuntimeError if there is not exactly one band. 

905 """ 

906 if len(data_bands) != 1: 

907 raise RuntimeError("GenerateHipsTask can only use data from a single band.") 

908 

909 return list(data_bands) 

910 

    @timeMethod
    def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
        """Run the GenerateHipsTask.

        Walks the sorted list of native-order pixels once, reading each
        tile, writing FITS/PNG outputs, and binning 2x2 into the parent
        order whenever a parent pixel is complete. Finishes by writing the
        properties, MOC, and Allsky files.

        Parameters
        ----------
        bands : `list [ `str` ]
            List of bands to be processed (or single band).
        max_order : `int`
            HEALPix order of the maximum (native) HPX exposures.
        hips_exposure_handle_dict : `dict` {`int`: `lsst.daf.butler.DeferredDatasetHandle`}
            Dict of handles for the HiPS high-resolution exposures.
            Key is (pixel number, ``band``).
        do_color : `bool`, optional
            Do color pngs instead of per-band grayscale.
        """
        min_order = self.config.min_order

        if not do_color:
            png_grayscale_mapping = AsinhMapping(
                self.config.png_gray_asinh_minimum,
                self.config.png_gray_asinh_stretch,
                Q=self.config.png_gray_asinh_softening,
            )
        else:
            # Color-only configuration fields (defined on
            # GenerateColorHipsConfig) are only read in this branch.
            png_color_mapping = AsinhMapping(
                self.config.png_color_asinh_minimum,
                self.config.png_color_asinh_stretch,
                Q=self.config.png_color_asinh_softening,
            )

            bcb = self.config.blue_channel_band
            gcb = self.config.green_channel_band
            rcb = self.config.red_channel_band
            colorstr = f"{bcb}{gcb}{rcb}"

        # The base path is based on the hips_base_uri.
        hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)

        # We need to unique-ify the pixels because they show up for multiple bands.
        # The output of this is a sorted array.
        pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))

        # Add a "gutter" pixel at the end. Start with 0 which maps to 0 always.
        # The gutter lets the look-ahead comparison below run off the end of
        # the real pixel list without a bounds check.
        pixels = np.append(pixels, [0])

        # Convert the pixels to each order that will be generated.
        pixels_shifted = {}
        pixels_shifted[max_order] = pixels
        for order in range(max_order - 1, min_order - 1, -1):
            # Each right-shift by 2 bits maps a nested-scheme pixel to its
            # parent pixel one order down.
            pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)

        # And set the gutter to an illegal pixel value.
        for order in range(min_order, max_order + 1):
            pixels_shifted[order][-1] = -1

        # Read in the first pixel for determining image properties.
        exp0 = list(hips_exposure_handle_dict.values())[0].get()
        bbox = exp0.getBBox()
        npix = bbox.getWidth()
        shift_order = int(np.round(np.log2(npix)))

        # Create blank exposures for each level, including the highest order.
        # We also make sure we create blank exposures for any bands used in the color
        # PNGs, even if they aren't available.
        exposures = {}
        for band in bands:
            for order in range(min_order, max_order + 1):
                exp = exp0.Factory(bbox=bbox)
                exp.image.array[:, :] = np.nan
                exposures[(band, order)] = exp

        # Loop over all pixels, avoiding the gutter.
        for pixel_counter, pixel in enumerate(pixels[:-1]):
            self.log.debug("Working on high resolution pixel %d", pixel)
            for band in bands:
                # Read all the exposures here for the highest order.
                # There will always be at least one band with a HiPS image available
                # at the highest order. However, for color images it is possible that
                # not all bands have coverage so we require this check.
                if (pixel, band) in hips_exposure_handle_dict:
                    exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()

            # Go up the HiPS tree.
            # We only write pixels and rebin to fill the parent pixel when we are
            # done with a current pixel, which is determined if the next pixel
            # has a different pixel number.
            for order in range(max_order, min_order - 1, -1):
                if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
                    # This order is not done, and so none of the other orders will be.
                    break

                # We can now write out the images for each band.
                # Note this will always trigger at the max order where each pixel is unique.
                if not do_color:
                    for band in bands:
                        self._write_hips_image(
                            hips_base_path.join(f"band_{band}", forceDirectory=True),
                            order,
                            pixels_shifted[order][pixel_counter],
                            exposures[(band, order)].image,
                            png_grayscale_mapping,
                            shift_order=shift_order,
                        )
                else:
                    # Make a color png.
                    self._write_hips_color_png(
                        hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                        order,
                        pixels_shifted[order][pixel_counter],
                        exposures[(self.config.red_channel_band, order)].image,
                        exposures[(self.config.green_channel_band, order)].image,
                        exposures[(self.config.blue_channel_band, order)].image,
                        png_color_mapping,
                    )

                log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
                self.log.log(
                    log_level,
                    "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
                    ",".join(bands),
                    order,
                    pixels_shifted[order][pixel_counter],
                    pixel_counter,
                    len(pixels) - 1,
                )

                # When we are at the top of the tree, erase top level images and continue.
                if order == min_order:
                    for band in bands:
                        exposures[(band, order)].image.array[:, :] = np.nan
                    continue

                # Now average the images for each band.
                for band in bands:
                    arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
                    with warnings.catch_warnings():
                        # nanmean emits RuntimeWarning for all-NaN 2x2 cells,
                        # which are expected at coverage edges.
                        warnings.simplefilter("ignore")
                        binned_image_arr = np.nanmean(arr, axis=(1, 3))

                    # Fill the next level up. We figure out which of the four
                    # sub-pixels the current pixel occupies.
                    sub_index = (pixels_shifted[order][pixel_counter]
                                 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))

                    # Fill exposure at the next level up.
                    exp = exposures[(band, order - 1)]

                    # Fill the correct subregion.
                    if sub_index == 0:
                        exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
                    elif sub_index == 1:
                        exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
                    elif sub_index == 2:
                        exp.image.array[npix//2:, npix//2:] = binned_image_arr
                    elif sub_index == 3:
                        exp.image.array[0: npix//2, npix//2:] = binned_image_arr
                    else:
                        # This should be impossible.
                        raise ValueError("Illegal pixel sub index")

                    # Erase the previous exposure.
                    if order < max_order:
                        exposures[(band, order)].image.array[:, :] = np.nan

        # Write the properties files and MOCs.
        if not do_color:
            for band in bands:
                band_pixels = np.array([pixel
                                        for pixel, band_ in hips_exposure_handle_dict.keys()
                                        if band_ == band])
                band_pixels = np.sort(band_pixels)

                self._write_properties_and_moc(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    max_order,
                    band_pixels,
                    exp0,
                    shift_order,
                    band,
                    False,
                )
                self._write_allsky_file(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    min_order,
                )
        else:
            self._write_properties_and_moc(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                max_order,
                pixels[:-1],
                exp0,
                shift_order,
                colorstr,
                True,
            )
            self._write_allsky_file(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                min_order,
            )

1111 

    def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
        """Write a HiPS image.

        Writes both the FITS tile and a grayscale PNG tile to
        ``Norder{order}/Dir{dir}/Npix{pixel}``.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image : `lsst.afw.image.Image`
            Image to write.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        shift_order : `int`, optional
            HPX shift_order.
        """
        # WARNING: In general PipelineTasks are not allowed to do any outputs
        # outside of the butler. This task has been given (temporary)
        # Special Dispensation because of the nature of HiPS outputs until
        # a more controlled solution can be found.

        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        wcs = makeHpxWcs(order, pixel, shift_order=shift_order)

        uri = hips_dir.join(f"Npix{pixel}.fits")

        # Write to a local temporary file first, then copy into place, so the
        # destination never sees a partially-written file.
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

        # And make a grayscale png as well

        with np.errstate(invalid="ignore"):
            # NOTE(review): the 255 - ... inverts the asinh mapping (high
            # intensity -> low PNG value); non-finite and negative pixels are
            # then forced to 0 below.
            vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)

        vals[~np.isfinite(image.array) | (image.array < 0)] = 0
        # Row order is flipped before PNG creation — presumably to reconcile
        # FITS bottom-up row order with PNG top-down convention.
        im = Image.fromarray(vals[::-1, :], "L")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

1167 

    def _write_hips_color_png(
        self,
        hips_base_path,
        order,
        pixel,
        image_red,
        image_green,
        image_blue,
        png_mapping,
    ):
        """Write a color png HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image_red : `lsst.afw.image.Image`
            Input for red channel of output png.
        image_green : `lsst.afw.image.Image`
            Input for green channel of output png.
        image_blue : `lsst.afw.image.Image`
            Input for blue channel of output png.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        """
        # WARNING: In general PipelineTasks are not allowed to do any outputs
        # outside of the butler. This task has been given (temporary)
        # Special Dispensation because of the nature of HiPS outputs until
        # a more controlled solution can be found.

        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        # We need to convert nans to the minimum values in the mapping.
        # Copies are taken so the caller's arrays are not modified.
        arr_red = image_red.array.copy()
        arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
        arr_green = image_green.array.copy()
        arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
        arr_blue = image_blue.array.copy()
        arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]

        image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)

        # Row order is flipped before PNG creation, matching the grayscale
        # tile writer.
        im = Image.fromarray(image_array[::-1, :, :], mode="RGB")

        uri = hips_dir.join(f"Npix{pixel}.png")

        # Write locally then copy into place to avoid partial files at the
        # destination.
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

1229 

    def _write_properties_and_moc(
        self,
        hips_base_path,
        max_order,
        pixels,
        exposure,
        shift_order,
        band,
        multiband
    ):
        """Write HiPS properties file and MOC.

        Parameters
        ----------
        hips_base_path : : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray` (N,)
            Array of pixels used.
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        shift_order : `int`
            HPX shift order.
        band : `str`
            Band (or color).
        multiband : `bool`
            Is band multiband / color?
        """
        area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)

        initial_ra = self.config.properties.initial_ra
        initial_dec = self.config.properties.initial_dec
        initial_fov = self.config.properties.initial_fov

        if initial_ra is None or initial_dec is None or initial_fov is None:
            # We want to point to an arbitrary pixel in the footprint.
            # Just take the median pixel value for simplicity.
            temp_pixels = pixels.copy()
            if temp_pixels.size % 2 == 0:
                # Force an odd element count so np.median returns an actual
                # pixel value rather than the mean of the two middle values.
                temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
            medpix = int(np.median(temp_pixels))
            _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
            _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.

            # Only fill in the values the user left unset.
            if initial_ra is None or initial_dec is None:
                initial_ra = _initial_ra
                initial_dec = _initial_dec
            if initial_fov is None:
                initial_fov = _initial_fov

        self._write_hips_properties_file(
            hips_base_path,
            self.config.properties,
            band,
            multiband,
            exposure,
            max_order,
            shift_order,
            area,
            initial_ra,
            initial_dec,
            initial_fov,
        )

        # Write the MOC coverage
        self._write_hips_moc_file(
            hips_base_path,
            max_order,
            pixels,
        )

1301 

1302 def _write_hips_properties_file( 

1303 self, 

1304 hips_base_path, 

1305 properties_config, 

1306 band, 

1307 multiband, 

1308 exposure, 

1309 max_order, 

1310 shift_order, 

1311 area, 

1312 initial_ra, 

1313 initial_dec, 

1314 initial_fov 

1315 ): 

1316 """Write HiPS properties file. 

1317 

1318 Parameters 

1319 ---------- 

1320 hips_base_path : `lsst.resources.ResourcePath` 

1321 ResourcePath at top of HiPS tree. File will be written 

1322 to this path as ``properties``. 

1323 properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig` 

1324 Configuration for properties values. 

1325 band : `str` 

1326 Name of band(s) for HiPS tree. 

1327 multiband : `bool` 

1328 Is multiband / color? 

1329 exposure : `lsst.afw.image.Exposure` 

1330 Sample HPX exposure used for generating HiPS tiles. 

1331 max_order : `int` 

1332 Maximum HEALPix order. 

1333 shift_order : `int` 

1334 HPX shift order. 

1335 area : `float` 

1336 Coverage area in square degrees. 

1337 initial_ra : `float` 

1338 Initial HiPS RA position (degrees). 

1339 initial_dec : `float` 

1340 Initial HiPS Dec position (degrees). 

1341 initial_fov : `float` 

1342 Initial HiPS display size (degrees). 

1343 """ 

1344 # WARNING: In general PipelineTasks are not allowed to do any outputs 

1345 # outside of the butler. This task has been given (temporary) 

1346 # Special Dispensation because of the nature of HiPS outputs until 

1347 # a more controlled solution can be found. 

1348 def _write_property(fh, name, value): 

1349 """Write a property name/value to a file handle. 

1350 

1351 Parameters 

1352 ---------- 

1353 fh : file handle (blah) 

1354 Open for writing. 

1355 name : `str` 

1356 Name of property 

1357 value : `str` 

1358 Value of property 

1359 """ 

1360 # This ensures that the name has no spaces or space-like characters, 

1361 # per the HiPS standard. 

1362 if re.search(r"\s", name): 

1363 raise ValueError(f"``{name}`` cannot contain any space characters.") 

1364 if "=" in name: 

1365 raise ValueError(f"``{name}`` cannot contain an ``=``") 

1366 

1367 fh.write(f"{name:25}= {value}\n") 

1368 

1369 if exposure.image.array.dtype == np.dtype("float32"): 

1370 bitpix = -32 

1371 elif exposure.image.array.dtype == np.dtype("float64"): 

1372 bitpix = -64 

1373 elif exposure.image.array.dtype == np.dtype("int32"): 

1374 bitpix = 32 

1375 

1376 date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z" 

1377 pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees') 

1378 

1379 uri = hips_base_path.join("properties") 

1380 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1381 with open(temporary_uri.ospath, "w") as fh: 

1382 _write_property( 

1383 fh, 

1384 "creator_did", 

1385 properties_config.creator_did_template.format(band=band), 

1386 ) 

1387 if properties_config.obs_collection is not None: 

1388 _write_property(fh, "obs_collection", properties_config.obs_collection) 

1389 _write_property( 

1390 fh, 

1391 "obs_title", 

1392 properties_config.obs_title_template.format(band=band), 

1393 ) 

1394 if properties_config.obs_description_template is not None: 

1395 _write_property( 

1396 fh, 

1397 "obs_description", 

1398 properties_config.obs_description_template.format(band=band), 

1399 ) 

1400 if len(properties_config.prov_progenitor) > 0: 

1401 for prov_progenitor in properties_config.prov_progenitor: 

1402 _write_property(fh, "prov_progenitor", prov_progenitor) 

1403 if properties_config.obs_ack is not None: 

1404 _write_property(fh, "obs_ack", properties_config.obs_ack) 

1405 _write_property(fh, "obs_regime", "Optical") 

1406 _write_property(fh, "data_pixel_bitpix", str(bitpix)) 

1407 _write_property(fh, "dataproduct_type", "image") 

1408 _write_property(fh, "moc_sky_fraction", str(area/41253.)) 

1409 _write_property(fh, "data_ucd", "phot.flux") 

1410 _write_property(fh, "hips_creation_date", date_iso8601) 

1411 _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask") 

1412 _write_property(fh, "hips_creator", "Vera C. Rubin Observatory") 

1413 _write_property(fh, "hips_version", "1.4") 

1414 _write_property(fh, "hips_release_date", date_iso8601) 

1415 _write_property(fh, "hips_frame", "equatorial") 

1416 _write_property(fh, "hips_order", str(max_order)) 

1417 _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth())) 

1418 _write_property(fh, "hips_status", "private master clonableOnce") 

1419 if multiband: 

1420 _write_property(fh, "hips_tile_format", "png") 

1421 _write_property(fh, "dataproduct_subtype", "color") 

1422 else: 

1423 _write_property(fh, "hips_tile_format", "png fits") 

1424 _write_property(fh, "hips_pixel_bitpix", str(bitpix)) 

1425 _write_property(fh, "hips_pixel_scale", str(pixel_scale)) 

1426 _write_property(fh, "hips_initial_ra", str(initial_ra)) 

1427 _write_property(fh, "hips_initial_dec", str(initial_dec)) 

1428 _write_property(fh, "hips_initial_fov", str(initial_fov)) 

1429 if multiband: 

1430 if self.config.blue_channel_band in properties_config.spectral_ranges: 

1431 em_min = properties_config.spectral_ranges[ 

1432 self.config.blue_channel_band 

1433 ].lambda_min/1e9 

1434 else: 

1435 self.log.warning("blue band %s not in self.config.spectral_ranges.", band) 

1436 em_min = 3e-7 

1437 if self.config.red_channel_band in properties_config.spectral_ranges: 

1438 em_max = properties_config.spectral_ranges[ 

1439 self.config.red_channel_band 

1440 ].lambda_max/1e9 

1441 else: 

1442 self.log.warning("red band %s not in self.config.spectral_ranges.", band) 

1443 em_max = 1e-6 

1444 else: 

1445 if band in properties_config.spectral_ranges: 

1446 em_min = properties_config.spectral_ranges[band].lambda_min/1e9 

1447 em_max = properties_config.spectral_ranges[band].lambda_max/1e9 

1448 else: 

1449 self.log.warning("band %s not in self.config.spectral_ranges.", band) 

1450 em_min = 3e-7 

1451 em_max = 1e-6 

1452 _write_property(fh, "em_min", str(em_min)) 

1453 _write_property(fh, "em_max", str(em_max)) 

1454 if properties_config.t_min is not None: 

1455 _write_property(fh, "t_min", properties_config.t_min) 

1456 if properties_config.t_max is not None: 

1457 _write_property(fh, "t_max", properties_config.t_max) 

1458 

1459 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1460 

    def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
        """Write HiPS MOC file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath to top of HiPS tree. File will be written as
            to this path as ``Moc.fits``.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray`
            Array of pixels covered.
        min_uniq_order : `int`, optional
            Minimum HEALPix order for looking for fully covered pixels.
        """
        # WARNING: In general PipelineTasks are not allowed to do any outputs
        # outside of the butler. This task has been given (temporary)
        # Special Dispensation because of the nature of HiPS outputs until
        # a more controlled solution can be found.

        # Make the initial list of UNIQ pixels.
        # NUNIQ encoding: uniq = 4*4**order + pixel.
        uniq = 4*(4**max_order) + pixels

        # Make a healsparse map which provides easy degrade/comparisons.
        hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
        hspmap[pixels] = 1.0

        # Loop over orders, degrade each time, and look for pixels with full coverage.
        for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
            hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
            pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
            # Check if any of the pixels at uniq_order have full coverage.
            # A parent is fully covered when the summed map equals the number
            # of max_order children it contains, 4**(max_order - uniq_order).
            covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
            if covered.size == 0:
                # No pixels at uniq_order are fully covered, we're done.
                break
            # Replace the UNIQ pixels that are fully covered.
            uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]

        # Remove duplicate pixels.
        uniq = np.unique(uniq)

        # Output to fits.
        tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
        tbl["UNIQ"] = uniq

        # Recover the HEALPix order of each UNIQ value for the MOCORDER key.
        order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
        moc_order = np.max(order)

        hdu = fits.BinTableHDU(tbl)
        hdu.header["PIXTYPE"] = "HEALPIX"
        hdu.header["ORDERING"] = "NUNIQ"
        hdu.header["COORDSYS"] = "C"
        hdu.header["MOCORDER"] = moc_order
        hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"

        uri = hips_base_path.join("Moc.fits")

        # Write locally then copy into place.
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            hdu.writeto(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

1523 

1524 def _write_allsky_file(self, hips_base_path, allsky_order): 

1525 """Write an Allsky.png file. 

1526 

1527 Parameters 

1528 ---------- 

1529 hips_base_path : `lsst.resources.ResourcePath` 

1530 Resource path to the base of the HiPS directory tree. 

1531 allsky_order : `int` 

1532 HEALPix order of the minimum order to make allsky file. 

1533 """ 

1534 tile_size = self.config.allsky_tilesize 

1535 

1536 # The Allsky file format is described in 

1537 # https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf 

1538 # From S4.3.2: 

1539 # The Allsky file is built as an array of tiles, stored side by side in 

1540 # the left-to-right order. The width of this array must be the square 

1541 # root of the number of the tiles of the order. For instance, the width 

1542 # of this array at order 3 is 27 ( (int)sqrt(768) ). To avoid having a 

1543 # too large Allsky file, the resolution of each tile may be reduced but 

1544 # must stay a power of two (typically 64x64 pixels rather than 512x512). 

1545 

1546 n_tiles = hpg.nside_to_npixel(hpg.order_to_nside(allsky_order)) 

1547 n_tiles_wide = int(np.floor(np.sqrt(n_tiles))) 

1548 n_tiles_high = int(np.ceil(n_tiles / n_tiles_wide)) 

1549 

1550 allsky_image = None 

1551 

1552 allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True) 

1553 pixel_regex = re.compile(r"Npix([0-9]+)\.png$") 

1554 png_uris = list( 

1555 ResourcePath.findFileResources( 

1556 candidates=[allsky_order_uri], 

1557 file_filter=pixel_regex, 

1558 ) 

1559 ) 

1560 

1561 for png_uri in png_uris: 

1562 matches = re.match(pixel_regex, png_uri.basename()) 

1563 pix_num = int(matches.group(1)) 

1564 tile_image = Image.open(io.BytesIO(png_uri.read())) 

1565 row = math.floor(pix_num//n_tiles_wide) 

1566 column = pix_num % n_tiles_wide 

1567 box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size) 

1568 tile_image_shrunk = tile_image.resize((tile_size, tile_size)) 

1569 

1570 if allsky_image is None: 

1571 allsky_image = Image.new( 

1572 tile_image.mode, 

1573 (n_tiles_wide*tile_size, n_tiles_high*tile_size), 

1574 ) 

1575 allsky_image.paste(tile_image_shrunk, box) 

1576 

1577 uri = allsky_order_uri.join("Allsky.png") 

1578 

1579 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri: 

1580 allsky_image.save(temporary_uri.ospath) 

1581 

1582 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True) 

1583 

1584 def _get_dir_number(self, pixel): 

1585 """Compute the directory number from a pixel. 

1586 

1587 Parameters 

1588 ---------- 

1589 pixel : `int` 

1590 HEALPix pixel number. 

1591 

1592 Returns 

1593 ------- 

1594 dir_number : `int` 

1595 HiPS directory number. 

1596 """ 

1597 return (pixel//10000)*10000 

1598 

1599 

class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", ),
                                   defaultTemplates={"coaddName": "deep"}):
    """Connections for GenerateColorHipsTask.

    Unlike `GenerateHipsConnections`, the quantum has no ``band``
    dimension, so a single quantum receives the exposures for all bands
    needed to build color tiles.
    """
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )

1611 

1612 

class GenerateColorHipsConfig(GenerateHipsConfig,
                              pipelineConnections=GenerateColorHipsConnections):
    """Configuration parameters for GenerateColorHipsTask."""
    # Default channel assignment is g/r/i for blue/green/red.
    blue_channel_band = pexConfig.Field(
        doc="Band to use for blue channel of color pngs.",
        dtype=str,
        default="g",
    )
    green_channel_band = pexConfig.Field(
        doc="Band to use for green channel of color pngs.",
        dtype=str,
        default="r",
    )
    red_channel_band = pexConfig.Field(
        doc="Band to use for red channel of color pngs.",
        dtype=str,
        default="i",
    )
    # The png_color_asinh_* fields are passed directly to astropy's
    # AsinhMapping(minimum, stretch, Q=softening) in run().
    png_color_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for color png scaling.",
        dtype=float,
        default=0.0,
    )
    png_color_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for color png scaling.",
        dtype=float,
        default=5.0,
    )
    png_color_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for color png scaling.",
        dtype=float,
        default=8.0,
    )

1646 

1647 

class GenerateColorHipsTask(GenerateHipsTask):
    """Task for making a HiPS tree with color pngs."""
    ConfigClass = GenerateColorHipsConfig
    _DefaultName = "generateColorHips"
    # Switches the inherited run() into color mode.
    color_task = True

    def _check_data_bands(self, data_bands):
        """Check the data for configured bands.

        Warn if any color bands are missing data.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of bands in bgr color order.
        """
        if len(data_bands) == 0:
            raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")

        # Warn for every configured color channel that has no input coverage
        # (blue, green, red — in that order).
        channel_checks = (
            ("Color png blue_channel_band %s not in dataset.", self.config.blue_channel_band),
            ("Color png green_channel_band %s not in dataset.", self.config.green_channel_band),
            ("Color png red_channel_band %s not in dataset.", self.config.red_channel_band),
        )
        for log_format, channel_band in channel_checks:
            if channel_band not in data_bands:
                self.log.warning(log_format, channel_band)

        # Always return the configured channel bands in bgr order, even if
        # some of them have no data.
        return [
            self.config.blue_channel_band,
            self.config.green_channel_band,
            self.config.red_channel_band,
        ]