Coverage for python/lsst/pipe/tasks/hips.py: 13%
599 statements
coverage.py v7.2.3, created at 2023-04-20 04:18 -0700
1# This file is part of pipe_tasks.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (https://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This program is free software: you can redistribute it and/or modify
10# it under the terms of the GNU General Public License as published by
11# the Free Software Foundation, either version 3 of the License, or
12# (at your option) any later version.
13#
14# This program is distributed in the hope that it will be useful,
15# but WITHOUT ANY WARRANTY; without even the implied warranty of
16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17# GNU General Public License for more details.
18#
19# You should have received a copy of the GNU General Public License
20# along with this program. If not, see <https://www.gnu.org/licenses/>.
22"""Tasks for making and manipulating HIPS images."""
24__all__ = ["HighResolutionHipsTask", "HighResolutionHipsConfig", "HighResolutionHipsConnections",
25 "GenerateHipsTask", "GenerateHipsConfig", "GenerateColorHipsTask", "GenerateColorHipsConfig"]
27from collections import defaultdict
28import numpy as np
29import argparse
30import io
31import sys
32import re
33import warnings
34import math
35from datetime import datetime
36import hpgeom as hpg
37import healsparse as hsp
38from astropy.io import fits
39from astropy.visualization.lupton_rgb import AsinhMapping
40from PIL import Image
42from lsst.sphgeom import RangeSet, HealpixPixelization
43from lsst.utils.timer import timeMethod
44from lsst.daf.butler import Butler, DatasetRef, Quantum, SkyPixDimension, UnresolvedRefWarning
45import lsst.pex.config as pexConfig
46import lsst.pipe.base as pipeBase
47import lsst.afw.geom as afwGeom
48import lsst.afw.math as afwMath
49import lsst.afw.image as afwImage
50import lsst.geom as geom
51from lsst.afw.geom import makeHpxWcs
52from lsst.resources import ResourcePath
55class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
56 dimensions=("healpix9", "band"),
57 defaultTemplates={"coaddName": "deep"}):
58 coadd_exposure_handles = pipeBase.connectionTypes.Input(
59 doc="Coadded exposures to convert to HIPS format.",
60 name="{coaddName}Coadd_calexp",
61 storageClass="ExposureF",
62 dimensions=("tract", "patch", "skymap", "band"),
63 multiple=True,
64 deferLoad=True,
65 )
66 hips_exposures = pipeBase.connectionTypes.Output(
67 doc="HiPS-compatible HPX image.",
68 name="{coaddName}Coadd_hpx",
69 storageClass="ExposureF",
70 dimensions=("healpix11", "band"),
71 multiple=True,
72 )
74 def __init__(self, *, config=None):
75 super().__init__(config=config)
77 quantum_order = None
78 for dim in self.dimensions:
79 if "healpix" in dim:
80 if quantum_order is not None:
81 raise ValueError("Must not specify more than one quantum healpix dimension.")
82 quantum_order = int(dim.split("healpix")[1])
83 if quantum_order is None:
84 raise ValueError("Must specify a healpix dimension in quantum dimensions.")
86 if quantum_order > config.hips_order:
87 raise ValueError("Quantum healpix dimension order must not be greater than hips_order")
89 order = None
90 for dim in self.hips_exposures.dimensions:
91 if "healpix" in dim:
92 if order is not None:
93 raise ValueError("Must not specify more than one healpix dimension.")
94 order = int(dim.split("healpix")[1])
95 if order is None:
96 raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")
98 if order != config.hips_order:
99 raise ValueError("healpix dimension order must match config.hips_order.")
102class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
103 pipelineConnections=HighResolutionHipsConnections):
104 """Configuration parameters for HighResolutionHipsTask.
106 Notes
107 -----
108 A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
109 2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
110 cell, with 2**shift_order sub-pixels on a side (2**shift_order x 2**shift_order per cell), which
111 defines the target resolution of the HiPS image. The IVOA recommends
112 shift_order=9, for 2**9=512 pixels on a side.
114 Table 5 from
115 https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
116 shows the relationship between hips_order, number of tiles (full
117 sky coverage), cell size, and sub-pixel size/image resolution (with
118 the default shift_order=9):
119 +------------+-----------------+--------------+------------------+
120 | hips_order | Number of Tiles | Cell Size | Image Resolution |
121 +============+=================+==============+==================+
122 | 0 | 12 | 58.63 deg | 6.871 arcmin |
123 | 1 | 48 | 29.32 deg | 3.435 arcmin |
124 | 2 | 192 | 14.66 deg | 1.718 arcmin |
125 | 3 | 768 | 7.329 deg | 51.53 arcsec |
126 | 4 | 3072 | 3.665 deg | 25.77 arcsec |
127 | 5 | 12288 | 1.832 deg | 12.88 arcsec |
128 | 6 | 49152 | 54.97 arcmin | 6.442 arcsec |
129 | 7 | 196608 | 27.48 arcmin | 3.221 arcsec |
130 | 8 | 786432 | 13.74 arcmin | 1.61 arcsec |
131 | 9 | 3145728 | 6.871 arcmin | 805.2 mas |
132 | 10 | 12582912 | 3.435 arcmin | 402.6 mas |
133 | 11 | 50331648 | 1.718 arcmin | 201.3 mas |
134 | 12 | 201326592 | 51.53 arcsec | 100.6 mas |
135 | 13 | 805306368 | 25.77 arcsec | 50.32 mas |
136 +------------+-----------------+--------------+------------------+
137 """
138 hips_order = pexConfig.Field(
139 doc="HIPS image order.",
140 dtype=int,
141 default=11,
142 )
143 shift_order = pexConfig.Field(
144 doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
145 dtype=int,
146 default=9,
147 )
148 warp = pexConfig.ConfigField(
149 dtype=afwMath.Warper.ConfigClass,
150 doc="Warper configuration",
151 )
153 def setDefaults(self):
154 self.warp.warpingKernelName = "lanczos5"
157class HipsTaskNameDescriptor:
158 """Descriptor used create a DefaultName that matches the order of
159 the defined dimensions in the connections class.
161 Parameters
162 ----------
163 prefix : `str`
164 The prefix of the default name, to which the healpix order will be
165 appended.
166 """
167 def __init__(self, prefix):
168 # create a defaultName template
169 self._defaultName = f"{prefix}{{}}"
170 self._order = None
172 def __get__(self, obj, klass=None):
173 if klass is None:
174 raise RuntimeError(
175 "HipsTaskDescriptor was used in an unexpected context"
176 )
177 if self._order is None:
178 klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions
179 for dim in klassDimensions:
180 if (match := re.match(r"^healpix(\d*)$", dim)) is not None:
181 self._order = int(match.group(1))
182 break
183 else:
184 raise RuntimeError(
185 "Could not find healpix dimension in connections class"
186 )
187 return self._defaultName.format(self._order)
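# A minimal usage sketch for this descriptor (the value shown assumes the default
# connections dimensions declared above, i.e. ("healpix9", "band")):
#
#     >>> HighResolutionHipsTask._DefaultName
#     'highResolutionHips9'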
190class HighResolutionHipsTask(pipeBase.PipelineTask):
191 """Task for making high resolution HiPS images."""
192 ConfigClass = HighResolutionHipsConfig
193 _DefaultName = HipsTaskNameDescriptor("highResolutionHips")
195 def __init__(self, **kwargs):
196 super().__init__(**kwargs)
197 self.warper = afwMath.Warper.fromConfig(self.config.warp)
199 @timeMethod
200 def runQuantum(self, butlerQC, inputRefs, outputRefs):
201 inputs = butlerQC.get(inputRefs)
203 healpix_dim = f"healpix{self.config.hips_order}"
205 pixels = [hips_exposure.dataId[healpix_dim]
206 for hips_exposure in outputRefs.hips_exposures]
208 outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])
210 hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]:
211 hips_exposure_ref for hips_exposure_ref in outputRefs.hips_exposures}
212 for pixel, hips_exposure in outputs.hips_exposures.items():
213 butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])
215 def run(self, pixels, coadd_exposure_handles):
216 """Run the HighResolutionHipsTask.
218 Parameters
219 ----------
220 pixels : `Iterable` [ `int` ]
221 Iterable of healpix pixels (nest ordering) to warp to.
222 coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
223 Handles for the coadd exposures.
225 Returns
226 -------
227 outputs : `lsst.pipe.base.Struct`
228 ``hips_exposures`` is a dict keyed by HEALPix pixel number, with the HPX exposure as the value.
229 """
230 self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)
232 npix = 2**self.config.shift_order
233 bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0),
234 dimensions=geom.Extent2I(npix, npix))
236 # For each healpix pixel we will create an empty exposure with the
237 # correct HPX WCS. We furthermore create a dict to hold each of
238 # the warps that will go into each HPX exposure.
239 exp_hpx_dict = {}
240 warp_dict = {}
241 for pixel in pixels:
242 wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
243 exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
244 exp_hpx_dict[pixel] = exp_hpx
245 warp_dict[pixel] = []
247 first_handle = True
248 # Loop over input coadd exposures to minimize i/o (batching together the
249 # pixels that overlap a given coadd speeds things up by ~8x).
250 for handle in coadd_exposure_handles:
251 coadd_exp = handle.get()
253 # For each pixel, warp the coadd to the HPX WCS for the pixel.
254 for pixel in pixels:
255 warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)
257 exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
258 exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)
260 if first_handle:
261 # Make sure the mask planes, filter, and photocalib of the output
262 # exposure match the (first) input exposure.
263 exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
264 exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
265 exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())
267 if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
268 # There is no overlap, skip.
269 self.log.debug(
270 "No overlap between output HPX %d and input exposure %s",
271 pixel,
272 handle.dataId
273 )
274 continue
276 exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
277 warp_dict[pixel].append(exp.maskedImage)
279 first_handle = False
281 stats_flags = afwMath.stringToStatisticsProperty("MEAN")
282 stats_ctrl = afwMath.StatisticsControl()
283 stats_ctrl.setNanSafe(True)
284 stats_ctrl.setWeighted(True)
285 stats_ctrl.setCalcErrorFromInputVariance(True)
287 # Loop over pixels and combine the warps for each pixel.
288 # The combination is done with a simple mean for pixels that
289 # overlap in neighboring patches.
290 for pixel in pixels:
291 exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)
293 if not warp_dict[pixel]:
294 # Nothing in this pixel
295 self.log.debug("No data in HPX pixel %d", pixel)
296 # Remove the pixel from the output, no need to persist an
297 # empty exposure.
298 exp_hpx_dict.pop(pixel)
299 continue
301 exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
302 warp_dict[pixel],
303 stats_flags,
304 stats_ctrl,
305 [1.0]*len(warp_dict[pixel]),
306 clipped=0,
307 maskMap=[]
308 )
310 return pipeBase.Struct(hips_exposures=exp_hpx_dict)
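# A hedged usage sketch for calling run() directly (``handles`` is an illustrative
# list of DeferredDatasetHandle objects obtained from the butler; normally
# runQuantum assembles these arguments):
#
#     >>> task = HighResolutionHipsTask()
#     >>> struct = task.run(pixels=[123456], coadd_exposure_handles=handles)
#     >>> exp_hpx = struct.hips_exposures[123456]  # ExposureF on the HPX grid, if covered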
312 @classmethod
313 def build_quantum_graph_cli(cls, argv):
314 """A command-line interface entry point to `build_quantum_graph`.
315 This method provides the implementation for the
316 ``build-high-resolution-hips-qg`` script.
318 Parameters
319 ----------
320 argv : `Sequence` [ `str` ]
321 Command-line arguments (e.g. ``sys.argv[1:]``).
322 """
323 parser = cls._make_cli_parser()
325 args = parser.parse_args(argv)
327 if args.subparser_name is None:
328 parser.print_help()
329 sys.exit(1)
331 pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
332 expanded_pipeline = list(pipeline.toExpandedPipeline())
334 if len(expanded_pipeline) != 1:
335 raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")
337 (task_def,) = expanded_pipeline
339 butler = Butler(args.butler_config, collections=args.input)
341 if args.subparser_name == "segment":
342 # Do the segmentation
343 hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
344 dataset = task_def.connections.coadd_exposure_handles.name
345 data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded())
346 region_pixels = []
347 for data_id in data_ids:
348 region = data_id.region
349 pixel_range = hpix_pixelization.envelope(region)
350 for r in pixel_range.ranges():
351 region_pixels.extend(range(r[0], r[1]))
352 indices = np.unique(region_pixels)
354 print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
355 for pixel in indices:
356 print(pixel)
358 elif args.subparser_name == "build":
359 # Build the quantum graph.
361 build_ranges = RangeSet(sorted(args.pixels))
363 qg = cls.build_quantum_graph(
364 task_def,
365 butler.registry,
366 args.hpix_build_order,
367 build_ranges,
368 where=args.where,
369 collections=args.input
370 )
371 qg.saveUri(args.save_qgraph)
373 @classmethod
374 def _make_cli_parser(cls):
375 """Make the command-line parser.
377 Returns
378 -------
379 parser : `argparse.ArgumentParser`
380 """
381 parser = argparse.ArgumentParser(
382 description=(
383 "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."
384 ),
385 )
386 subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")
388 parser_segment = subparsers.add_parser("segment",
389 help="Determine survey segments for workflow.")
390 parser_build = subparsers.add_parser("build",
391 help="Build quantum graph for HighResolutionHipsTask")
393 for sub in [parser_segment, parser_build]:
394 # These arguments are in common.
395 sub.add_argument(
396 "-b",
397 "--butler-config",
398 type=str,
399 help="Path to data repository or butler configuration.",
400 required=True,
401 )
402 sub.add_argument(
403 "-p",
404 "--pipeline",
405 type=str,
406 help="Pipeline file, limited to one task.",
407 required=True,
408 )
409 sub.add_argument(
410 "-i",
411 "--input",
412 type=str,
413 nargs="+",
414 help="Input collection(s) to search for coadd exposures.",
415 required=True,
416 )
417 sub.add_argument(
418 "-o",
419 "--hpix_build_order",
420 type=int,
421 default=1,
422 help="HEALPix order to segment sky for building quantum graph files.",
423 )
424 sub.add_argument(
425 "-w",
426 "--where",
427 type=str,
428 default=None,
429 help="Data ID expression used when querying for input coadd datasets.",
430 )
432 parser_build.add_argument(
433 "-q",
434 "--save-qgraph",
435 type=str,
436 help="Output filename for QuantumGraph.",
437 required=True,
438 )
439 parser_build.add_argument(
440 "-P",
441 "--pixels",
442 type=int,
443 nargs="+",
444 help="Pixels at --hpix_build_order to generate quantum graph.",
445 required=True,
446 )
448 return parser
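# Illustrative invocations of the parser built above (repository path, pipeline
# file, and collection names are placeholders):
#
#   build-high-resolution-hips-qg segment -b /repo -p hips.yaml -i HSC/runs/coadds -o 1
#   build-high-resolution-hips-qg build -b /repo -p hips.yaml -i HSC/runs/coadds \
#       -o 1 -P 100 101 102 -q hips.qgraph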
450 @classmethod
451 def build_quantum_graph(
452 cls,
453 task_def,
454 registry,
455 constraint_order,
456 constraint_ranges,
457 where=None,
458 collections=None,
459 ):
460 """Generate a `QuantumGraph` for running just this task.
462 This is a temporary workaround for incomplete butler query support for
463 HEALPix dimensions.
465 Parameters
466 ----------
467 task_def : `lsst.pipe.base.TaskDef`
468 Task definition.
469 registry : `lsst.daf.butler.Registry`
470 Client for the butler database. May be read-only.
471 constraint_order : `int`
472 HEALPix order used to constrain which quanta are generated, via
473 ``constraint_ranges``. This should be a coarser grid (smaller
474 order) than the order used for the task's quantum and output data
475 IDs, and ideally something between the spatial scale of a patch and
476 the data repository's "common skypix" system (usually ``htm7``).
477 constraint_ranges : `lsst.sphgeom.RangeSet`
478 RangeSet which describes constraint pixels (HEALPix NEST, with order
479 constraint_order) to constrain generated quanta.
480 where : `str`, optional
481 A boolean `str` expression of the form accepted by
482 `Registry.queryDatasets` to constrain input datasets. This may
483 contain a constraint on tracts, patches, or bands, but not HEALPix
484 indices. Constraints on tracts and patches should usually be
485 unnecessary, however; existing coadds that overlap the given
486 HEALPix indices will be selected without such a constraint, and
487 providing one may reject some that should normally be included.
488 collections : `str` or `Iterable` [ `str` ], optional
489 Collection or collections to search for input datasets, in order.
490 If not provided, ``registry.defaults.collections`` will be
491 searched.
492 """
493 config = task_def.config
495 dataset_types = pipeBase.PipelineDatasetTypes.fromPipeline(pipeline=[task_def], registry=registry)
496 # Since we know this is the only task in the pipeline, we know there
497 # is only one overall input and one overall output.
498 (input_dataset_type,) = dataset_types.inputs
500 # Extract the main output dataset type (which needs multiple
501 # DatasetRefs, and tells us the output HPX level), and make a set of
502 # what remains for more mechanical handling later.
503 output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name]
504 incidental_output_dataset_types = dataset_types.outputs.copy()
505 incidental_output_dataset_types.remove(output_dataset_type)
506 (hpx_output_dimension,) = (d for d in output_dataset_type.dimensions
507 if isinstance(d, SkyPixDimension))
509 constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization
510 common_skypix_name = registry.dimensions.commonSkyPix.name
511 common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization
513 # We will need all the pixels at the quantum resolution as well
514 task_dimensions = registry.dimensions.extract(task_def.connections.dimensions)
515 (hpx_dimension,) = (d for d in task_dimensions if d.name != "band")
516 hpx_pixelization = hpx_dimension.pixelization
518 if hpx_pixelization.level < constraint_order:
519 raise ValueError(f"Quantum order {hpx_pixelization.level} must be < {constraint_order}")
520 hpx_ranges = constraint_ranges.scaled(4**(hpx_pixelization.level - constraint_order))
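# Worked example of the scaling above: a single constraint pixel at
# constraint_order=1 (range [5, 6)) expands by 4**(11 - 1) for a quantum
# order of 11, giving the quantum-pixel range [5242880, 6291456).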
522 # We can be generous in looking for pixels here, because we constrain by actual
523 # patch regions below.
524 common_skypix_ranges = RangeSet()
525 for begin, end in constraint_ranges:
526 for hpx_index in range(begin, end):
527 constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
528 common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)
530 # To keep the query from getting out of hand (and breaking), we simplify until we have
531 # fewer than 100 ranges, which seems to work fine.
532 for simp in range(1, 10):
533 if len(common_skypix_ranges) < 100:
534 break
535 common_skypix_ranges.simplify(simp)
537 # Use that RangeSet to assemble a WHERE constraint expression. This
538 # could definitely get too big if the "constraint healpix" order is too
539 # fine.
540 where_terms = []
541 bind = {}
542 for n, (begin, end) in enumerate(common_skypix_ranges):
543 stop = end - 1 # registry range syntax is inclusive
544 if begin == stop:
545 where_terms.append(f"{common_skypix_name} = cpx{n}")
546 bind[f"cpx{n}"] = begin
547 else:
548 where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)")
549 bind[f"cpx{n}a"] = begin
550 bind[f"cpx{n}b"] = stop
551 if where is None:
552 where = " OR ".join(where_terms)
553 else:
554 where = f"({where}) AND ({' OR '.join(where_terms)})"
555 # Query for input datasets with this constraint, and ask for expanded
556 # data IDs because we want regions. Immediately group this by patch so
557 # we don't repeat the later geometric work n_bands more times than we need to.
558 input_refs = registry.queryDatasets(
559 input_dataset_type,
560 where=where,
561 findFirst=True,
562 collections=collections,
563 bind=bind
564 ).expanded()
565 inputs_by_patch = defaultdict(set)
566 patch_dimensions = registry.dimensions.extract(["patch"])
567 for input_ref in input_refs:
568 inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref)
569 if not inputs_by_patch:
570 message_body = "\n".join(input_refs.explain_no_results())
571 raise RuntimeError(f"No inputs found:\n{message_body}")
573 # Iterate over patches and compute the set of output healpix pixels
574 # that overlap each one. Use that to associate inputs with output
575 # pixels, but only for the output pixels we've already identified.
576 inputs_by_hpx = defaultdict(set)
577 for patch_data_id, input_refs_for_patch in inputs_by_patch.items():
578 patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
579 for begin, end in patch_hpx_ranges & hpx_ranges:
580 for hpx_index in range(begin, end):
581 inputs_by_hpx[hpx_index].update(input_refs_for_patch)
582 # Iterate over the dict we just created and create the actual quanta.
583 quanta = []
584 for hpx_index, input_refs_for_hpx_index in inputs_by_hpx.items():
585 # Group inputs by band.
586 input_refs_by_band = defaultdict(list)
587 for input_ref in input_refs_for_hpx_index:
588 input_refs_by_band[input_ref.dataId["band"]].append(input_ref)
589 # Iterate over bands to make quanta.
590 for band, input_refs_for_band in input_refs_by_band.items():
591 data_id = registry.expandDataId({hpx_dimension: hpx_index, "band": band})
593 hpx_pixel_ranges = RangeSet(hpx_index)
594 hpx_output_ranges = hpx_pixel_ranges.scaled(4**(config.hips_order - hpx_pixelization.level))
595 output_data_ids = []
596 for begin, end in hpx_output_ranges:
597 for hpx_output_index in range(begin, end):
598 output_data_ids.append(
599 registry.expandDataId({hpx_output_dimension: hpx_output_index, "band": band})
600 )
601 with warnings.catch_warnings():
602 warnings.simplefilter("ignore", category=UnresolvedRefWarning)
603 outputs = {dt: [DatasetRef(dt, data_id)] for dt in incidental_output_dataset_types}
604 outputs[output_dataset_type] = [DatasetRef(output_dataset_type, data_id)
605 for data_id in output_data_ids]
606 quanta.append(
607 Quantum(
608 taskName=task_def.taskName,
609 taskClass=task_def.taskClass,
610 dataId=data_id,
611 initInputs={},
612 inputs={input_dataset_type: input_refs_for_band},
613 outputs=outputs,
614 )
615 )
617 if len(quanta) == 0:
618 raise RuntimeError("Given constraints yielded empty quantum graph.")
620 return pipeBase.QuantumGraph(quanta={task_def: quanta})
623class HipsPropertiesSpectralTerm(pexConfig.Config):
624 lambda_min = pexConfig.Field(
625 doc="Minimum wavelength (nm)",
626 dtype=float,
627 )
628 lambda_max = pexConfig.Field(
629 doc="Maximum wavelength (nm)",
630 dtype=float,
631 )
634class HipsPropertiesConfig(pexConfig.Config):
635 """Configuration parameters for writing a HiPS properties file."""
636 creator_did_template = pexConfig.Field(
637 doc=("Unique identifier of the HiPS - Format: IVOID. "
638 "Use ``{band}`` to substitute the band name."),
639 dtype=str,
640 optional=False,
641 )
642 obs_collection = pexConfig.Field(
643 doc="Short name of original data set - Format: one word",
644 dtype=str,
645 optional=True,
646 )
647 obs_description_template = pexConfig.Field(
648 doc=("Data set description - Format: free text, longer free text "
649 "description of the dataset. Use ``{band}`` to substitute "
650 "the band name."),
651 dtype=str,
652 )
653 prov_progenitor = pexConfig.ListField(
654 doc="Provenance of the original data - Format: free text",
655 dtype=str,
656 default=[],
657 )
658 obs_title_template = pexConfig.Field(
659 doc=("Data set title format: free text, but should be short. "
660 "Use ``{band}`` to substitute the band name."),
661 dtype=str,
662 optional=False,
663 )
664 spectral_ranges = pexConfig.ConfigDictField(
665 doc=("Mapping from band to lambda_min, lamba_max (nm). May be approximate."),
666 keytype=str,
667 itemtype=HipsPropertiesSpectralTerm,
668 default={},
669 )
670 initial_ra = pexConfig.Field(
671 doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.",
672 dtype=float,
673 optional=True,
674 )
675 initial_dec = pexConfig.Field(
676 doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.",
677 dtype=float,
678 optional=True,
679 )
680 initial_fov = pexConfig.Field(
681 doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
682 dtype=float,
683 optional=True,
684 )
685 obs_ack = pexConfig.Field(
686 doc="Observation acknowledgements (free text).",
687 dtype=str,
688 optional=True,
689 )
690 t_min = pexConfig.Field(
691 doc="Time (MJD) of earliest observation included in HiPS",
692 dtype=float,
693 optional=True,
694 )
695 t_max = pexConfig.Field(
696 doc="Time (MJD) of latest observation included in HiPS",
697 dtype=float,
698 optional=True,
699 )
701 def validate(self):
702 super().validate()
704 if self.obs_collection is not None:
705 if re.search(r"\s", self.obs_collection):
706 raise ValueError("obs_collection cannot contain any space characters.")
708 def setDefaults(self):
709 # Values here taken from
710 # https://github.com/lsst-dm/dax_obscore/blob/44ac15029136e2ec15/configs/dp02.yaml#L46
711 u_term = HipsPropertiesSpectralTerm()
712 u_term.lambda_min = 330.
713 u_term.lambda_max = 400.
714 self.spectral_ranges["u"] = u_term
715 g_term = HipsPropertiesSpectralTerm()
716 g_term.lambda_min = 402.
717 g_term.lambda_max = 552.
718 self.spectral_ranges["g"] = g_term
719 r_term = HipsPropertiesSpectralTerm()
720 r_term.lambda_min = 552.
721 r_term.lambda_max = 691.
722 self.spectral_ranges["r"] = r_term
723 i_term = HipsPropertiesSpectralTerm()
724 i_term.lambda_min = 691.
725 i_term.lambda_max = 818.
726 self.spectral_ranges["i"] = i_term
727 z_term = HipsPropertiesSpectralTerm()
728 z_term.lambda_min = 818.
729 z_term.lambda_max = 922.
730 self.spectral_ranges["z"] = z_term
731 y_term = HipsPropertiesSpectralTerm()
732 y_term.lambda_min = 970.
733 y_term.lambda_max = 1060.
734 self.spectral_ranges["y"] = y_term
737class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
738 dimensions=("instrument", "band"),
739 defaultTemplates={"coaddName": "deep"}):
740 hips_exposure_handles = pipeBase.connectionTypes.Input(
741 doc="HiPS-compatible HPX images.",
742 name="{coaddName}Coadd_hpx",
743 storageClass="ExposureF",
744 dimensions=("healpix11", "band"),
745 multiple=True,
746 deferLoad=True,
747 )
750class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
751 pipelineConnections=GenerateHipsConnections):
752 """Configuration parameters for GenerateHipsTask."""
753 # WARNING: In general PipelineTasks are not allowed to do any outputs
754 # outside of the butler. This task has been given (temporary)
755 # Special Dispensation because of the nature of HiPS outputs until
756 # a more controlled solution can be found.
757 hips_base_uri = pexConfig.Field(
758 doc="URI to HiPS base for output.",
759 dtype=str,
760 optional=False,
761 )
762 min_order = pexConfig.Field(
763 doc="Minimum healpix order for HiPS tree.",
764 dtype=int,
765 default=3,
766 )
767 properties = pexConfig.ConfigField(
768 dtype=HipsPropertiesConfig,
769 doc="Configuration for properties file.",
770 )
771 allsky_tilesize = pexConfig.Field(
772 dtype=int,
773 doc="Allsky.png tile size. Must be power of 2.",
774 default=512,
775 )
776 png_gray_asinh_minimum = pexConfig.Field(
777 doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
778 dtype=float,
779 default=0.0,
780 )
781 png_gray_asinh_stretch = pexConfig.Field(
782 doc="AsinhMapping linear stretch for grayscale png scaling.",
783 dtype=float,
784 default=2.0,
785 )
786 png_gray_asinh_softening = pexConfig.Field(
787 doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
788 dtype=float,
789 default=8.0,
790 )
793class GenerateHipsTask(pipeBase.PipelineTask):
794 """Task for making a HiPS tree with FITS and grayscale PNGs."""
795 ConfigClass = GenerateHipsConfig
796 _DefaultName = "generateHips"
797 color_task = False
799 @timeMethod
800 def runQuantum(self, butlerQC, inputRefs, outputRefs):
801 inputs = butlerQC.get(inputRefs)
803 dims = inputRefs.hips_exposure_handles[0].dataId.names
804 order = None
805 for dim in dims:
806 if "healpix" in dim:
807 order = int(dim.split("healpix")[1])
808 healpix_dim = dim
809 break
810 else:
811 raise RuntimeError("Could not determine healpix order for input exposures.")
813 hips_exposure_handle_dict = {
814 (hips_exposure_handle.dataId[healpix_dim],
815 hips_exposure_handle.dataId["band"]): hips_exposure_handle
816 for hips_exposure_handle in inputs["hips_exposure_handles"]
817 }
819 data_bands = {hips_exposure_handle.dataId["band"]
820 for hips_exposure_handle in inputs["hips_exposure_handles"]}
821 bands = self._check_data_bands(data_bands)
823 self.run(
824 bands=bands,
825 max_order=order,
826 hips_exposure_handle_dict=hips_exposure_handle_dict,
827 do_color=self.color_task,
828 )
830 def _check_data_bands(self, data_bands):
831 """Check that the data has only a single band.
833 Parameters
834 ----------
835 data_bands : `set` [`str`]
836 Bands from the input data.
838 Returns
839 -------
840 bands : `list` [`str`]
841 Single-element list containing the band to process.
843 Raises
844 ------
845 RuntimeError if there is not exactly one band.
846 """
847 if len(data_bands) != 1:
848 raise RuntimeError("GenerateHipsTask can only use data from a single band.")
850 return list(data_bands)
852 @timeMethod
853 def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
854 """Run the GenerateHipsTask.
856 Parameters
857 ----------
858 bands : `list` [`str`]
859 List of bands to be processed (or single band).
860 max_order : `int`
861 HEALPix order of the maximum (native) HPX exposures.
862 hips_exposure_handle_dict : `dict` [`tuple` [`int`, `str`], `lsst.daf.butler.DeferredDatasetHandle`]
863 Dict of handles for the HiPS high-resolution exposures.
864 Key is (pixel number, ``band``).
865 do_color : `bool`, optional
866 Do color pngs instead of per-band grayscale.
867 """
868 min_order = self.config.min_order
870 if not do_color:
871 png_grayscale_mapping = AsinhMapping(
872 self.config.png_gray_asinh_minimum,
873 self.config.png_gray_asinh_stretch,
874 Q=self.config.png_gray_asinh_softening,
875 )
876 else:
877 png_color_mapping = AsinhMapping(
878 self.config.png_color_asinh_minimum,
879 self.config.png_color_asinh_stretch,
880 Q=self.config.png_color_asinh_softening,
881 )
883 bcb = self.config.blue_channel_band
884 gcb = self.config.green_channel_band
885 rcb = self.config.red_channel_band
886 colorstr = f"{bcb}{gcb}{rcb}"
888 # The base path is based on the hips_base_uri.
889 hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)
891 # We need to unique-ify the pixels because they show up for multiple bands.
892 # The output of this is a sorted array.
893 pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))
895 # Add a "gutter" pixel at the end. Start with 0 which maps to 0 always.
896 pixels = np.append(pixels, [0])
898 # Convert the pixels to each order that will be generated.
899 pixels_shifted = {}
900 pixels_shifted[max_order] = pixels
901 for order in range(max_order - 1, min_order - 1, -1):
902 pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)
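# For example, with max_order=11 the pixel 1000 shifts to parent pixel 250
# at order 10 and 62 at order 9 (each 2-bit right shift divides by 4).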
904 # And set the gutter to an illegal pixel value.
905 for order in range(min_order, max_order + 1):
906 pixels_shifted[order][-1] = -1
908 # Read in the first pixel for determining image properties.
909 exp0 = list(hips_exposure_handle_dict.values())[0].get()
910 bbox = exp0.getBBox()
911 npix = bbox.getWidth()
912 shift_order = int(np.round(np.log2(npix)))
914 # Create blank exposures for each level, including the highest order.
915 # We also make sure we create blank exposures for any bands used in the color
916 # PNGs, even if they aren't available.
917 exposures = {}
918 for band in bands:
919 for order in range(min_order, max_order + 1):
920 exp = exp0.Factory(bbox=bbox)
921 exp.image.array[:, :] = np.nan
922 exposures[(band, order)] = exp
924 # Loop over all pixels, avoiding the gutter.
925 for pixel_counter, pixel in enumerate(pixels[:-1]):
926 self.log.debug("Working on high resolution pixel %d", pixel)
927 for band in bands:
928 # Read all the exposures here for the highest order.
929 # There will always be at least one band with a HiPS image available
930 # at the highest order. However, for color images it is possible that
931 # not all bands have coverage so we require this check.
932 if (pixel, band) in hips_exposure_handle_dict:
933 exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()
935 # Go up the HiPS tree.
936 # We only write pixels and rebin to fill the parent pixel when we are
937 # done with the current pixel, which is determined by whether the next
938 # pixel has a different pixel number.
939 for order in range(max_order, min_order - 1, -1):
940 if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
941 # This order is not done, and so none of the other orders will be.
942 break
944 # We can now write out the images for each band.
945 # Note this will always trigger at the max order where each pixel is unique.
946 if not do_color:
947 for band in bands:
948 self._write_hips_image(
949 hips_base_path.join(f"band_{band}", forceDirectory=True),
950 order,
951 pixels_shifted[order][pixel_counter],
952 exposures[(band, order)].image,
953 png_grayscale_mapping,
954 shift_order=shift_order,
955 )
956 else:
957 # Make a color png.
958 self._write_hips_color_png(
959 hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
960 order,
961 pixels_shifted[order][pixel_counter],
962 exposures[(self.config.red_channel_band, order)].image,
963 exposures[(self.config.green_channel_band, order)].image,
964 exposures[(self.config.blue_channel_band, order)].image,
965 png_color_mapping,
966 )
968 log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
969 self.log.log(
970 log_level,
971 "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
972 ",".join(bands),
973 order,
974 pixels_shifted[order][pixel_counter],
975 pixel_counter,
976 len(pixels) - 1,
977 )
979 # When we are at the top of the tree, erase top level images and continue.
980 if order == min_order:
981 for band in bands:
982 exposures[(band, order)].image.array[:, :] = np.nan
983 continue
985 # Now average the images for each band.
986 for band in bands:
987 arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
988 with warnings.catch_warnings():
989 warnings.simplefilter("ignore")
990 binned_image_arr = np.nanmean(arr, axis=(1, 3))
992 # Fill the next level up. We figure out which of the four
993 # sub-pixels the current pixel occupies.
994 sub_index = (pixels_shifted[order][pixel_counter]
995 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))
997 # Fill exposure at the next level up.
998 exp = exposures[(band, order - 1)]
1000 # Fill the correct subregion.
1001 if sub_index == 0:
1002 exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
1003 elif sub_index == 1:
1004 exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
1005 elif sub_index == 2:
1006 exp.image.array[npix//2:, npix//2:] = binned_image_arr
1007 elif sub_index == 3:
1008 exp.image.array[0: npix//2, npix//2:] = binned_image_arr
1009 else:
1010 # This should be impossible.
1011 raise ValueError("Illegal pixel sub index")
1013 # Erase the previous exposure.
1014 if order < max_order:
1015 exposures[(band, order)].image.array[:, :] = np.nan
1017 # Write the properties files and MOCs.
1018 if not do_color:
1019 for band in bands:
1020 band_pixels = np.array([pixel
1021 for pixel, band_ in hips_exposure_handle_dict.keys()
1022 if band_ == band])
1023 band_pixels = np.sort(band_pixels)
1025 self._write_properties_and_moc(
1026 hips_base_path.join(f"band_{band}", forceDirectory=True),
1027 max_order,
1028 band_pixels,
1029 exp0,
1030 shift_order,
1031 band,
1032 False,
1033 )
1034 self._write_allsky_file(
1035 hips_base_path.join(f"band_{band}", forceDirectory=True),
1036 min_order,
1037 )
1038 else:
1039 self._write_properties_and_moc(
1040 hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
1041 max_order,
1042 pixels[:-1],
1043 exp0,
1044 shift_order,
1045 colorstr,
1046 True,
1047 )
1048 self._write_allsky_file(
1049 hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
1050 min_order,
1051 )
1053 def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
1054 """Write a HiPS image.
1056 Parameters
1057 ----------
1058 hips_base_path : `lsst.resources.ResourcePath`
1059 Resource path to the base of the HiPS directory tree.
1060 order : `int`
1061 HEALPix order of the HiPS image to write.
1062 pixel : `int`
1063 HEALPix pixel of the HiPS image.
1064 image : `lsst.afw.image.Image`
1065 Image to write.
1066 png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
1067 Mapping to convert image to scaled png.
1068 shift_order : `int`, optional
1069 HPX shift_order.
1070 """
1071 # WARNING: In general PipelineTasks are not allowed to do any outputs
1072 # outside of the butler. This task has been given (temporary)
1073 # Special Dispensation because of the nature of HiPS outputs until
1074 # a more controlled solution can be found.
1076 dir_number = self._get_dir_number(pixel)
1077 hips_dir = hips_base_path.join(
1078 f"Norder{order}",
1079 forceDirectory=True
1080 ).join(
1081 f"Dir{dir_number}",
1082 forceDirectory=True
1083 )
1085 wcs = makeHpxWcs(order, pixel, shift_order=shift_order)
1087 uri = hips_dir.join(f"Npix{pixel}.fits")
1089 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1090 image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())
1092 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1094 # And make a grayscale png as well
1096 vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)
1097 vals[~np.isfinite(image.array) | (image.array < 0)] = 0
1098 im = Image.fromarray(vals[::-1, :], "L")
1100 uri = hips_dir.join(f"Npix{pixel}.png")
1102 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1103 im.save(temporary_uri.ospath)
1105 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1107 def _write_hips_color_png(
1108 self,
1109 hips_base_path,
1110 order,
1111 pixel,
1112 image_red,
1113 image_green,
1114 image_blue,
1115 png_mapping,
1116 ):
1117 """Write a color png HiPS image.
1119 Parameters
1120 ----------
1121 hips_base_path : `lsst.resources.ResourcePath`
1122 Resource path to the base of the HiPS directory tree.
1123 order : `int`
1124 HEALPix order of the HiPS image to write.
1125 pixel : `int`
1126 HEALPix pixel of the HiPS image.
1127 image_red : `lsst.afw.image.Image`
1128 Input for red channel of output png.
1129 image_green : `lsst.afw.image.Image`
1130 Input for green channel of output png.
1131 image_blue : `lsst.afw.image.Image`
1132 Input for blue channel of output png.
1133 png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
1134 Mapping to convert image to scaled png.
1135 """
1136 # WARNING: In general PipelineTasks are not allowed to do any outputs
1137 # outside of the butler. This task has been given (temporary)
1138 # Special Dispensation because of the nature of HiPS outputs until
1139 # a more controlled solution can be found.
1141 dir_number = self._get_dir_number(pixel)
1142 hips_dir = hips_base_path.join(
1143 f"Norder{order}",
1144 forceDirectory=True
1145 ).join(
1146 f"Dir{dir_number}",
1147 forceDirectory=True
1148 )
1150 # We need to convert nans to the minimum values in the mapping.
1151 arr_red = image_red.array.copy()
1152 arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
1153 arr_green = image_green.array.copy()
1154 arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
1155 arr_blue = image_blue.array.copy()
1156 arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]
1158 image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)
1160 im = Image.fromarray(image_array[::-1, :, :], mode="RGB")
1162 uri = hips_dir.join(f"Npix{pixel}.png")
1164 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1165 im.save(temporary_uri.ospath)
1167 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1169 def _write_properties_and_moc(
1170 self,
1171 hips_base_path,
1172 max_order,
1173 pixels,
1174 exposure,
1175 shift_order,
1176 band,
1177 multiband
1178 ):
1179 """Write HiPS properties file and MOC.
1181 Parameters
1182 ----------
1183 hips_base_path : `lsst.resources.ResourcePath`
1184 Resource path to the base of the HiPS directory tree.
1185 max_order : `int`
1186 Maximum HEALPix order.
1187 pixels : `np.ndarray` (N,)
1188 Array of pixels used.
1189 exposure : `lsst.afw.image.Exposure`
1190 Sample HPX exposure used for generating HiPS tiles.
1191 shift_order : `int`
1192 HPX shift order.
1193 band : `str`
1194 Band (or color).
1195 multiband : `bool`
1196 Is band multiband / color?
1197 """
1198 area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)
1200 initial_ra = self.config.properties.initial_ra
1201 initial_dec = self.config.properties.initial_dec
1202 initial_fov = self.config.properties.initial_fov
1204 if initial_ra is None or initial_dec is None or initial_fov is None:
1205 # We want to point to an arbitrary pixel in the footprint.
1206 # Just take the median pixel value for simplicity.
1207 temp_pixels = pixels.copy()
1208 if temp_pixels.size % 2 == 0:
1209 temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
1210 medpix = int(np.median(temp_pixels))
1211 _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
1212 _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.
1214 if initial_ra is None or initial_dec is None:
1215 initial_ra = _initial_ra
1216 initial_dec = _initial_dec
1217 if initial_fov is None:
1218 initial_fov = _initial_fov
1220 self._write_hips_properties_file(
1221 hips_base_path,
1222 self.config.properties,
1223 band,
1224 multiband,
1225 exposure,
1226 max_order,
1227 shift_order,
1228 area,
1229 initial_ra,
1230 initial_dec,
1231 initial_fov,
1232 )
1234 # Write the MOC coverage
1235 self._write_hips_moc_file(
1236 hips_base_path,
1237 max_order,
1238 pixels,
1239 )
1241 def _write_hips_properties_file(
1242 self,
1243 hips_base_path,
1244 properties_config,
1245 band,
1246 multiband,
1247 exposure,
1248 max_order,
1249 shift_order,
1250 area,
1251 initial_ra,
1252 initial_dec,
1253 initial_fov
1254 ):
1255 """Write HiPS properties file.
1257 Parameters
1258 ----------
1259 hips_base_path : `lsst.resources.ResourcePath`
1260 ResourcePath at top of HiPS tree. File will be written
1261 to this path as ``properties``.
1262 properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig`
1263 Configuration for properties values.
1264 band : `str`
1265 Name of band(s) for HiPS tree.
1266 multiband : `bool`
1267 Is multiband / color?
1268 exposure : `lsst.afw.image.Exposure`
1269 Sample HPX exposure used for generating HiPS tiles.
1270 max_order : `int`
1271 Maximum HEALPix order.
1272 shift_order : `int`
1273 HPX shift order.
1274 area : `float`
1275 Coverage area in square degrees.
1276 initial_ra : `float`
1277 Initial HiPS RA position (degrees).
1278 initial_dec : `float`
1279 Initial HiPS Dec position (degrees).
1280 initial_fov : `float`
1281 Initial HiPS display size (degrees).
1282 """
1283 # WARNING: In general PipelineTasks are not allowed to do any outputs
1284 # outside of the butler. This task has been given (temporary)
1285 # Special Dispensation because of the nature of HiPS outputs until
1286 # a more controlled solution can be found.
1287 def _write_property(fh, name, value):
1288 """Write a property name/value to a file handle.
1290 Parameters
1291 ----------
1292 fh : file object
1293 Open for writing.
1294 name : `str`
1295 Name of property
1296 value : `str`
1297 Value of property
1298 """
1299 # This ensures that the name has no spaces or space-like characters,
1300 # per the HiPS standard.
1301 if re.search(r"\s", name):
1302 raise ValueError(f"``{name}`` cannot contain any space characters.")
1303 if "=" in name:
1304 raise ValueError(f"``{name}`` cannot contain an ``=``")
1306 fh.write(f"{name:25}= {value}\n")
1308 if exposure.image.array.dtype == np.dtype("float32"):
1309 bitpix = -32
1310 elif exposure.image.array.dtype == np.dtype("float64"):
1311 bitpix = -64
1312 elif exposure.image.array.dtype == np.dtype("int32"):
1313 bitpix = 32
1315 date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z"
1316 pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees')
1318 uri = hips_base_path.join("properties")
1319 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1320 with open(temporary_uri.ospath, "w") as fh:
1321 _write_property(
1322 fh,
1323 "creator_did",
1324 properties_config.creator_did_template.format(band=band),
1325 )
1326 if properties_config.obs_collection is not None:
1327 _write_property(fh, "obs_collection", properties_config.obs_collection)
1328 _write_property(
1329 fh,
1330 "obs_title",
1331 properties_config.obs_title_template.format(band=band),
1332 )
1333 if properties_config.obs_description_template is not None:
1334 _write_property(
1335 fh,
1336 "obs_description",
1337 properties_config.obs_description_template.format(band=band),
1338 )
1339 if len(properties_config.prov_progenitor) > 0:
1340 for prov_progenitor in properties_config.prov_progenitor:
1341 _write_property(fh, "prov_progenitor", prov_progenitor)
1342 if properties_config.obs_ack is not None:
1343 _write_property(fh, "obs_ack", properties_config.obs_ack)
1344 _write_property(fh, "obs_regime", "Optical")
1345 _write_property(fh, "data_pixel_bitpix", str(bitpix))
1346 _write_property(fh, "dataproduct_type", "image")
1347 _write_property(fh, "moc_sky_fraction", str(area/41253.))
1348 _write_property(fh, "data_ucd", "phot.flux")
1349 _write_property(fh, "hips_creation_date", date_iso8601)
1350 _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask")
1351 _write_property(fh, "hips_creator", "Vera C. Rubin Observatory")
1352 _write_property(fh, "hips_version", "1.4")
1353 _write_property(fh, "hips_release_date", date_iso8601)
1354 _write_property(fh, "hips_frame", "equatorial")
1355 _write_property(fh, "hips_order", str(max_order))
1356 _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth()))
1357 _write_property(fh, "hips_status", "private master clonableOnce")
1358 if multiband:
1359 _write_property(fh, "hips_tile_format", "png")
1360 _write_property(fh, "dataproduct_subtype", "color")
1361 else:
1362 _write_property(fh, "hips_tile_format", "png fits")
1363 _write_property(fh, "hips_pixel_bitpix", str(bitpix))
1364 _write_property(fh, "hips_pixel_scale", str(pixel_scale))
1365 _write_property(fh, "hips_initial_ra", str(initial_ra))
1366 _write_property(fh, "hips_initial_dec", str(initial_dec))
1367 _write_property(fh, "hips_initial_fov", str(initial_fov))
1368 if multiband:
1369 if self.config.blue_channel_band in properties_config.spectral_ranges:
1370 em_min = properties_config.spectral_ranges[
1371 self.config.blue_channel_band
1372 ].lambda_min/1e9
1373 else:
1374 self.log.warning("blue band %s not in self.config.spectral_ranges.", band)
1375 em_min = 3e-7
1376 if self.config.red_channel_band in properties_config.spectral_ranges:
1377 em_max = properties_config.spectral_ranges[
1378 self.config.red_channel_band
1379 ].lambda_max/1e9
1380 else:
1381 self.log.warning("red band %s not in self.config.spectral_ranges.", band)
1382 em_max = 1e-6
1383 else:
1384 if band in properties_config.spectral_ranges:
1385 em_min = properties_config.spectral_ranges[band].lambda_min/1e9
1386 em_max = properties_config.spectral_ranges[band].lambda_max/1e9
1387 else:
1388 self.log.warning("band %s not in self.config.spectral_ranges.", band)
1389 em_min = 3e-7
1390 em_max = 1e-6
1391 _write_property(fh, "em_min", str(em_min))
1392 _write_property(fh, "em_max", str(em_max))
1393 if properties_config.t_min is not None:
1394 _write_property(fh, "t_min", properties_config.t_min)
1395 if properties_config.t_max is not None:
1396 _write_property(fh, "t_max", properties_config.t_max)
1398 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1400 def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
1401 """Write HiPS MOC file.
1403 Parameters
1404 ----------
1405 hips_base_path : `lsst.resources.ResourcePath`
1406 ResourcePath to top of HiPS tree. File will be written
1407 to this path as ``Moc.fits``.
1408 max_order : `int`
1409 Maximum HEALPix order.
1410 pixels : `np.ndarray`
1411 Array of pixels covered.
1412 min_uniq_order : `int`, optional
1413 Minimum HEALPix order for looking for fully covered pixels.
1414 """
1415 # WARNING: In general PipelineTasks are not allowed to do any outputs
1416 # outside of the butler. This task has been given (temporary)
1417 # Special Dispensation because of the nature of HiPS outputs until
1418 # a more controlled solution can be found.
1420 # Make the initial list of UNIQ pixels
1421 uniq = 4*(4**max_order) + pixels
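# NUNIQ encoding worked example: uniq = 4*(4**order) + pixel, so order=11,
# pixel=0 gives 16777216 and pixel=5 gives 16777221; the order is recovered
# later as log2(uniq//4)//2.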
1423 # Make a healsparse map which provides easy degrade/comparisons.
1424 hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
1425 hspmap[pixels] = 1.0
1427 # Loop over orders, degrade each time, and look for pixels with full coverage.
1428 for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
1429 hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
1430 pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
1431 # Check if any of the pixels at uniq_order have full coverage.
1432 covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
1433 if covered.size == 0:
1434 # No pixels at uniq_order are fully covered, we're done.
1435 break
1436 # Replace the UNIQ pixels that are fully covered.
1437 uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]
1439 # Remove duplicate pixels.
1440 uniq = np.unique(uniq)
1442 # Output to fits.
1443 tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
1444 tbl["UNIQ"] = uniq
1446 order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
1447 moc_order = np.max(order)
1449 hdu = fits.BinTableHDU(tbl)
1450 hdu.header["PIXTYPE"] = "HEALPIX"
1451 hdu.header["ORDERING"] = "NUNIQ"
1452 hdu.header["COORDSYS"] = "C"
1453 hdu.header["MOCORDER"] = moc_order
1454 hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"
1456 uri = hips_base_path.join("Moc.fits")
1458 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1459 hdu.writeto(temporary_uri.ospath)
1461 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1463 def _write_allsky_file(self, hips_base_path, allsky_order):
1464 """Write an Allsky.png file.
1466 Parameters
1467 ----------
1468 hips_base_path : `lsst.resources.ResourcePath`
1469 Resource path to the base of the HiPS directory tree.
1470 allsky_order : `int`
1471 Minimum HEALPix order at which to build the Allsky file.
1472 """
1473 tile_size = self.config.allsky_tilesize
1474 n_tiles_per_side = int(np.sqrt(hpg.nside_to_npixel(hpg.order_to_nside(allsky_order))))
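# For example, at the default min_order=3 there are 12*4**3 = 768 tiles, so
# the Allsky mosaic is int(sqrt(768)) = 27 tiles on a side.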
1476 allsky_image = None
1478 allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True)
1479 pixel_regex = re.compile(r"Npix([0-9]+)\.png$")
1480 png_uris = list(
1481 ResourcePath.findFileResources(
1482 candidates=[allsky_order_uri],
1483 file_filter=pixel_regex,
1484 )
1485 )
1487 for png_uri in png_uris:
1488 matches = re.match(pixel_regex, png_uri.basename())
1489 pix_num = int(matches.group(1))
1490 tile_image = Image.open(io.BytesIO(png_uri.read()))
1491 row = math.floor(pix_num//n_tiles_per_side)
1492 column = pix_num % n_tiles_per_side
1493 box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size)
1494 tile_image_shrunk = tile_image.resize((tile_size, tile_size))
1496 if allsky_image is None:
1497 allsky_image = Image.new(
1498 tile_image.mode,
1499 (n_tiles_per_side*tile_size, n_tiles_per_side*tile_size),
1500 )
1501 allsky_image.paste(tile_image_shrunk, box)
1503 uri = allsky_order_uri.join("Allsky.png")
1505 with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
1506 allsky_image.save(temporary_uri.ospath)
1508 uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
1510 def _get_dir_number(self, pixel):
1511 """Compute the directory number from a pixel.
1513 Parameters
1514 ----------
1515 pixel : `int`
1516 HEALPix pixel number.
1518 Returns
1519 -------
1520 dir_number : `int`
1521 HiPS directory number.
1522 """
1523 return (pixel//10000)*10000
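# Directory numbering examples for the HiPS layout used above:
#   _get_dir_number(12345) -> 10000   (tile written under .../Dir10000/Npix12345.*)
#   _get_dir_number(9999)  -> 0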
1526class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
1527 dimensions=("instrument", ),
1528 defaultTemplates={"coaddName": "deep"}):
1529 hips_exposure_handles = pipeBase.connectionTypes.Input(
1530 doc="HiPS-compatible HPX images.",
1531 name="{coaddName}Coadd_hpx",
1532 storageClass="ExposureF",
1533 dimensions=("healpix11", "band"),
1534 multiple=True,
1535 deferLoad=True,
1536 )
1539class GenerateColorHipsConfig(GenerateHipsConfig,
1540 pipelineConnections=GenerateColorHipsConnections):
1541 """Configuration parameters for GenerateColorHipsTask."""
1542 blue_channel_band = pexConfig.Field(
1543 doc="Band to use for blue channel of color pngs.",
1544 dtype=str,
1545 default="g",
1546 )
1547 green_channel_band = pexConfig.Field(
1548 doc="Band to use for green channel of color pngs.",
1549 dtype=str,
1550 default="r",
1551 )
1552 red_channel_band = pexConfig.Field(
1553 doc="Band to use for red channel of color pngs.",
1554 dtype=str,
1555 default="i",
1556 )
1557 png_color_asinh_minimum = pexConfig.Field(
1558 doc="AsinhMapping intensity to be mapped to black for color png scaling.",
1559 dtype=float,
1560 default=0.0,
1561 )
1562 png_color_asinh_stretch = pexConfig.Field(
1563 doc="AsinhMapping linear stretch for color png scaling.",
1564 dtype=float,
1565 default=5.0,
1566 )
1567 png_color_asinh_softening = pexConfig.Field(
1568 doc="AsinhMapping softening parameter (Q) for color png scaling.",
1569 dtype=float,
1570 default=8.0,
1571 )
1574class GenerateColorHipsTask(GenerateHipsTask):
1575 """Task for making a HiPS tree with color pngs."""
1576 ConfigClass = GenerateColorHipsConfig
1577 _DefaultName = "generateColorHips"
1578 color_task = True
1580 def _check_data_bands(self, data_bands):
1581 """Check the data for configured bands.
1583 Warn if any color bands are missing data.
1585 Parameters
1586 ----------
1587 data_bands : `set` [`str`]
1588 Bands from the input data.
1590 Returns
1591 -------
1592 bands : `list` [`str`]
1593 List of bands in bgr color order.
1594 """
1595 if len(data_bands) == 0:
1596 raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")
1598 if self.config.blue_channel_band not in data_bands:
1599 self.log.warning(
1600 "Color png blue_channel_band %s not in dataset.",
1601 self.config.blue_channel_band
1602 )
1603 if self.config.green_channel_band not in data_bands:
1604 self.log.warning(
1605 "Color png green_channel_band %s not in dataset.",
1606 self.config.green_channel_band
1607 )
1608 if self.config.red_channel_band not in data_bands:
1609 self.log.warning(
1610 "Color png red_channel_band %s not in dataset.",
1611 self.config.red_channel_band
1612 )
1614 bands = [
1615 self.config.blue_channel_band,
1616 self.config.green_channel_band,
1617 self.config.red_channel_band,
1618 ]
1620 return bands