22"""Tasks for making and manipulating HIPS images."""
24__all__ = [
"HighResolutionHipsTask",
"HighResolutionHipsConfig",
"HighResolutionHipsConnections",
25 "GenerateHipsTask",
"GenerateHipsConfig",
"GenerateColorHipsTask",
"GenerateColorHipsConfig"]
from collections import defaultdict
from datetime import datetime
import argparse
import io
import math
import re
import warnings

import numpy as np
import healsparse as hsp
import hpgeom as hpg
from PIL import Image
from astropy.io import fits
from astropy.visualization.lupton_rgb import AsinhMapping

import lsst.pex.config as pexConfig
import lsst.geom as geom
import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.pipe.base as pipeBase
from lsst.afw.geom import makeHpxWcs
from lsst.utils.timer import timeMethod
from lsst.daf.butler import Butler, DataCoordinate, DatasetRef, Quantum, SkyPixDimension
from lsst.resources import ResourcePath
from lsst.sphgeom import HealpixPixelization, RangeSet


class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
                                    dimensions=("healpix9", "band"),
                                    defaultTemplates={"coaddName": "deep"}):
    coadd_exposure_handles = pipeBase.connectionTypes.Input(
        doc="Coadded exposures to convert to HIPS format.",
        name="{coaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    hips_exposures = pipeBase.connectionTypes.Output(
        doc="HiPS-compatible HPX image.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        quantum_order = None
        for dim in self.dimensions:
            if "healpix" in dim:
                if quantum_order is not None:
                    raise ValueError("Must not specify more than one quantum healpix dimension.")
                quantum_order = int(dim.split("healpix")[1])
        if quantum_order is None:
            raise ValueError("Must specify a healpix dimension in quantum dimensions.")

        if quantum_order > config.hips_order:
            raise ValueError("Quantum healpix dimension order must not be greater than hips_order")

        order = None
        for dim in self.hips_exposures.dimensions:
            if "healpix" in dim:
                if order is not None:
                    raise ValueError("Must not specify more than one healpix dimension.")
                order = int(dim.split("healpix")[1])
        if order is None:
            raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")

        if order != config.hips_order:
            raise ValueError("healpix dimension order must match config.hips_order.")


class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
                               pipelineConnections=HighResolutionHipsConnections):
    """Configuration parameters for HighResolutionHipsTask.

    Notes
    -----
    A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
    2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
    cell, with 2**shift_order x 2**shift_order sub-pixels on a side, which
    defines the target resolution of the HiPS image. The IVOA recommends
    shift_order=9, for 2**9=512 pixels on a side.

    The IVOA HiPS recommendation,
    https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf,
    shows the relationship between hips_order, number of tiles (full
    sky coverage), cell size, and sub-pixel size/image resolution (with
    the default shift_order=9):

    +------------+-----------------+--------------+------------------+
    | hips_order | Number of Tiles | Cell Size    | Image Resolution |
    +============+=================+==============+==================+
    | 0          | 12              | 58.63 deg    | 6.871 arcmin     |
    | 1          | 48              | 29.32 deg    | 3.435 arcmin     |
    | 2          | 192             | 14.66 deg    | 1.718 arcmin     |
    | 3          | 768             | 7.329 deg    | 51.53 arcsec     |
    | 4          | 3072            | 3.665 deg    | 25.77 arcsec     |
    | 5          | 12288           | 1.832 deg    | 12.88 arcsec     |
    | 6          | 49152           | 54.97 arcmin | 6.442 arcsec     |
    | 7          | 196608          | 27.48 arcmin | 3.221 arcsec     |
    | 8          | 786432          | 13.74 arcmin | 1.61 arcsec      |
    | 9          | 3145728         | 6.871 arcmin | 805.2 mas        |
    | 10         | 12582912        | 3.435 arcmin | 402.6 mas        |
    | 11         | 50331648        | 1.718 arcmin | 201.3 mas        |
    | 12         | 201326592       | 51.53 arcsec | 100.6 mas        |
    | 13         | 805306368       | 25.77 arcsec | 50.32 mas        |
    +------------+-----------------+--------------+------------------+
    """
    hips_order = pexConfig.Field(
        doc="HIPS image order.",
        dtype=int,
        default=11,
    )
    shift_order = pexConfig.Field(
        doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
        dtype=int,
        default=9,
    )
    warp = pexConfig.ConfigField(
        dtype=afwMath.Warper.ConfigClass,
        doc="Warper configuration",
    )

    def setDefaults(self):
        self.warp.warpingKernelName = "lanczos5"


class HipsTaskNameDescriptor:
    """Descriptor used to create a DefaultName that matches the order of
    the defined dimensions in the connections class.

    Parameters
    ----------
    prefix : `str`
        The prefix of the Default name, to which the order will be
        appended.
    """
    def __init__(self, prefix):
        # Create a defaultName template.
        self._defaultName = f"{prefix}{{}}"
        self._order = None

    def __get__(self, obj, klass=None):
        if klass is None:
            raise RuntimeError(
                "HipsTaskDescriptor was used in an unexpected context"
            )
        if self._order is None:
            klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions
            for dim in klassDimensions:
                if (match := re.match(r"^healpix(\d*)$", dim)) is not None:
                    self._order = int(match.group(1))
                    break
            else:
                raise RuntimeError(
                    "Could not find healpix dimension in connections class"
                )
        return self._defaultName.format(self._order)
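
# Illustrative note: because HighResolutionHipsConnections above declares the
# quantum dimension "healpix9", accessing HighResolutionHipsTask._DefaultName
# through this descriptor resolves to the string "highResolutionHips9".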


class HighResolutionHipsTask(pipeBase.PipelineTask):
    """Task for making high resolution HiPS images."""
    ConfigClass = HighResolutionHipsConfig
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.warper = afwMath.Warper.fromConfig(self.config.warp)

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        healpix_dim = f"healpix{self.config.hips_order}"

        pixels = [hips_exposure.dataId[healpix_dim]
                  for hips_exposure in outputRefs.hips_exposures]

        outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])

        hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]: hips_exposure_ref
                                  for hips_exposure_ref in outputRefs.hips_exposures}
        for pixel, hips_exposure in outputs.hips_exposures.items():
            butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])

    def run(self, pixels, coadd_exposure_handles):
        """Run the HighResolutionHipsTask.

        Parameters
        ----------
        pixels : `Iterable` [ `int` ]
            Iterable of healpix pixels (nest ordering) to warp to.
        coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
            Handles for the coadd exposures.

        Returns
        -------
        outputs : `lsst.pipe.base.Struct`
            ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value).
        """
        self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)

        npix = 2**self.config.shift_order
        bbox_hpx = geom.Box2I(geom.Point2I(0, 0), geom.Extent2I(npix, npix))

        # Set up an empty exposure and an (initially empty) list of warps for
        # each requested HPX pixel.
        exp_hpx_dict = {}
        warp_dict = {}
        for pixel in pixels:
            wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
            exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
            exp_hpx_dict[pixel] = exp_hpx
            warp_dict[pixel] = []

        first_handle = True
        # Loop over the input coadds (outer loop) so that each one is read
        # only once, warping it onto every requested HPX pixel (inner loop).
        for handle in coadd_exposure_handles:
            coadd_exp = handle.get()

            for pixel in pixels:
                warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)

                exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
                exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

                if first_handle:
                    # Match the output mask planes, filter, and photoCalib to
                    # the (first) input coadd.
                    exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
                    exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
                    exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())

                if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
                    # There is no overlap; skip this pixel/coadd combination.
                    self.log.debug(
                        "No overlap between output HPX %d and input exposure %s",
                        pixel,
                        handle.dataId,
                    )
                    continue

                exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
                warp_dict[pixel].append(exp.maskedImage)

            first_handle = False

        stats_flags = afwMath.stringToStatisticsProperty("MEAN")
        stats_ctrl = afwMath.StatisticsControl()
        stats_ctrl.setNanSafe(True)
        stats_ctrl.setWeighted(True)
        stats_ctrl.setCalcErrorFromInputVariance(True)

        # Combine the warps for each pixel with an unweighted mean stack.
        for pixel in pixels:
            exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

            if not warp_dict[pixel]:
                # No data landed in this pixel; do not persist an empty exposure.
                self.log.debug("No data in HPX pixel %d", pixel)
                exp_hpx_dict.pop(pixel)
                continue

            exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
                warp_dict[pixel],
                stats_flags,
                stats_ctrl,
                [1.0]*len(warp_dict[pixel]),
            )

        return pipeBase.Struct(hips_exposures=exp_hpx_dict)

    @classmethod
    def build_quantum_graph_cli(cls, argv):
        """A command-line interface entry point to `build_quantum_graph`.

        This method provides the implementation for the
        ``build-high-resolution-hips-qg`` script.

        Parameters
        ----------
        argv : `Sequence` [ `str` ]
            Command-line arguments (e.g. ``sys.argv[1:]``).
        """
        parser = cls._make_cli_parser()

        args = parser.parse_args(argv)

        if args.subparser_name is None:
            parser.print_help()
            return

        pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
        expanded_pipeline = list(pipeline.toExpandedPipeline())

        if len(expanded_pipeline) != 1:
            raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")

        (task_def,) = expanded_pipeline

        butler = Butler(args.butler_config, collections=args.input)

        if args.subparser_name == "segment":
            # Determine the HEALPix pixels (at the build order) covered by the
            # input coadds.
            hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
            dataset = task_def.connections.coadd_exposure_handles.name
            data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded())
            region_pixels = []
            for data_id in data_ids:
                region = data_id.region
                pixel_range = hpix_pixelization.envelope(region)
                for r in pixel_range.ranges():
                    region_pixels.extend(range(r[0], r[1]))
            indices = np.unique(region_pixels)

            print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
            for pixel in indices:
                print(pixel)

        elif args.subparser_name == "build":
            # Build the quantum graph.

            if args.output_run is None:
                if args.output is None:
                    raise ValueError("At least one of --output or --output-run options is required.")
                args.output_run = "{}/{}".format(args.output, pipeBase.Instrument.makeCollectionTimestamp())

            build_ranges = RangeSet(sorted(args.pixels))

            # Metadata for the output quantum graph.
            metadata = {
                "butler_argument": args.butler_config,
                "output": args.output,
                "output_run": args.output_run,
                "data_query": args.where,
                "time": f"{datetime.now()}",
            }

            qg = cls.build_quantum_graph(
                task_def,
                butler.registry,
                args.hpix_build_order,
                build_ranges,
                where=args.where,
                collections=args.input,
                metadata=metadata,
            )
            qg.saveUri(args.save_qgraph)
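
    # A minimal sketch (illustrative only) of how this entry point is driven:
    # the ``build-high-resolution-hips-qg`` console script forwards
    # ``sys.argv[1:]`` here, i.e.
    #
    #   import sys
    #   HighResolutionHipsTask.build_quantum_graph_cli(sys.argv[1:])
    #
    # The ``segment`` subcommand then prints the candidate HEALPix pixels at
    # the build order, and the ``build`` subcommand writes a QuantumGraph to
    # ``args.save_qgraph``.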

    @classmethod
    def _make_cli_parser(cls):
        """Make the command-line parser.

        Returns
        -------
        parser : `argparse.ArgumentParser`
        """
        parser = argparse.ArgumentParser(
            description=(
                "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."
            ),
        )
        subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")

        parser_segment = subparsers.add_parser("segment",
                                               help="Determine survey segments for workflow.")
        parser_build = subparsers.add_parser("build",
                                             help="Build quantum graph for HighResolutionHipsTask")

        for sub in [parser_segment, parser_build]:
            # These arguments are in common.
            sub.add_argument(
                "--butler-config",
                type=str,
                help="Path to data repository or butler configuration.",
                required=True,
            )
            sub.add_argument(
                "--pipeline",
                type=str,
                help="Pipeline file, limited to one task.",
                required=True,
            )
            sub.add_argument(
                "--input",
                type=str,
                help="Input collection(s) to search for coadd exposures.",
                required=True,
            )
            sub.add_argument(
                "--hpix_build_order",
                type=int,
                default=1,
                help="HEALPix order to segment sky for building quantum graph files.",
            )
            sub.add_argument(
                "--where",
                type=str,
                default=None,
                help="Data ID expression used when querying for input coadd datasets.",
            )

        parser_build.add_argument(
            "--output",
            type=str,
            help=(
                "Name of the output CHAINED collection. If this option is specified and "
                "--output-run is not, then a new RUN collection will be created by appending "
                "a timestamp to the value of this option."
            ),
            default=None,
        )
        parser_build.add_argument(
            "--output-run",
            type=str,
            help=(
                "Output RUN collection to write resulting images. If not provided "
                "then --output must be provided and a new RUN collection will be created "
                "by appending a timestamp to the value passed with --output."
            ),
            default=None,
        )
        parser_build.add_argument(
            "--save-qgraph",
            type=str,
            help="Output filename for QuantumGraph.",
            required=True,
        )
        parser_build.add_argument(
            "--pixels",
            type=int,
            nargs="+",
            help="Pixels at --hpix_build_order to generate quantum graph.",
            required=True,
        )

        return parser

    @classmethod
    def build_quantum_graph(
        cls,
        task_def,
        registry,
        constraint_order,
        constraint_ranges,
        where=None,
        collections=None,
        metadata=None,
    ):
        """Generate a `QuantumGraph` for running just this task.

        This is a temporary workaround for incomplete butler query support for
        HEALPix dimensions.

        Parameters
        ----------
        task_def : `lsst.pipe.base.TaskDef`
            Task definition.
        registry : `lsst.daf.butler.Registry`
            Client for the butler database.  May be read-only.
        constraint_order : `int`
            HEALPix order used to constrain which quanta are generated, via
            ``constraint_indices``.  This should be a coarser grid (smaller
            order) than the order used for the task's quantum and output data
            IDs, and ideally something between the spatial scale of a patch and
            the data repository's "common skypix" system (usually ``htm7``).
        constraint_ranges : `lsst.sphgeom.RangeSet`
            RangeSet which describes constraint pixels (HEALPix NEST, with
            order ``constraint_order``) to constrain generated quanta.
        where : `str`, optional
            A boolean `str` expression of the form accepted by
            `Registry.queryDatasets` to constrain input datasets.  This may
            contain a constraint on tracts, patches, or bands, but not HEALPix
            indices.  Constraints on tracts and patches should usually be
            unnecessary, however - existing coadds that overlap the given
            HEALPix indices will be selected without such a constraint, and
            providing one may reject some that should normally be included.
        collections : `str` or `Iterable` [ `str` ], optional
            Collection or collections to search for input datasets, in order.
            If not provided, ``registry.defaults.collections`` will be
            searched.
        metadata : `dict` [ `str`, `Any` ]
            Graph metadata.  It is required to contain an "output_run" key with
            the name of the output RUN collection.
        """
        config = task_def.config

        dataset_types = pipeBase.PipelineDatasetTypes.fromPipeline(pipeline=[task_def], registry=registry)
        # There is exactly one input dataset type (the coadds); the HPX images
        # are the primary output, and anything else is incidental.
        (input_dataset_type,) = dataset_types.inputs
        output_dataset_type = dataset_types.outputs[task_def.connections.hips_exposures.name]
        incidental_output_dataset_types = dataset_types.outputs.copy()
        incidental_output_dataset_types.remove(output_dataset_type)
        (hpx_output_dimension,) = (d for d in output_dataset_type.dimensions
                                   if isinstance(d, SkyPixDimension))

        constraint_hpx_pixelization = registry.dimensions[f"healpix{constraint_order}"].pixelization
        common_skypix_name = registry.dimensions.commonSkyPix.name
        common_skypix_pixelization = registry.dimensions.commonSkyPix.pixelization

        # The quantum (task) dimensions are a healpix dimension plus "band".
        task_dimensions = registry.dimensions.extract(task_def.connections.dimensions)
        (hpx_dimension,) = (d for d in task_dimensions if d.name != "band")
        hpx_pixelization = hpx_dimension.pixelization

        if hpx_pixelization.level < constraint_order:
            raise ValueError(f"Quantum order {hpx_pixelization.level} must be >= constraint order {constraint_order}")
        hpx_ranges = constraint_ranges.scaled(4**(hpx_pixelization.level - constraint_order))
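
        # Each increase of one in HEALPix order splits a NEST pixel into four
        # children, so a range of pixels at constraint_order maps to a range
        # scaled by 4**(level difference) at the quantum order.  For example
        # (illustrative), one constraint pixel at order 7 covers
        # 4**(9 - 7) = 16 quantum pixels at order 9.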
        # Convert the constraint ranges to overlapping ranges in the
        # registry's common skypix system, which is what can be queried on.
        common_skypix_ranges = RangeSet()
        for begin, end in constraint_ranges:
            for hpx_index in range(begin, end):
                constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
                common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)

        # Simplify the ranges until there are few enough terms to put into a
        # registry query.
        for simp in range(1, 10):
            if len(common_skypix_ranges) < 100:
                break
            common_skypix_ranges.simplify(simp)

        # Assemble a WHERE expression (with bind parameters) that selects the
        # common-skypix pixels in these ranges.
        where_terms = []
        bind = {}
        for n, (begin, stop) in enumerate(common_skypix_ranges):
            stop -= 1  # registry range syntax is inclusive
            if begin == stop:
                where_terms.append(f"{common_skypix_name} = cpx{n}")
                bind[f"cpx{n}"] = begin
            else:
                where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)")
                bind[f"cpx{n}a"] = begin
                bind[f"cpx{n}b"] = stop
        if where is None:
            where = " OR ".join(where_terms)
        else:
            where = f"({where}) AND ({' OR '.join(where_terms)})"

        # Query for the input coadds that overlap the constraint region.
        input_refs = registry.queryDatasets(
            input_dataset_type,
            where=where,
            findFirst=True,
            collections=collections,
            bind=bind,
        ).expanded()

        inputs_by_patch = defaultdict(set)
        patch_dimensions = registry.dimensions.extract(["patch"])
        for input_ref in input_refs:
            inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(input_ref)
        if not inputs_by_patch:
            message_body = "\n".join(input_refs.explain_no_results())
            raise RuntimeError(f"No inputs found:\n{message_body}")

        # Associate the inputs for each patch with the output healpix pixels
        # that overlap it, keeping only pixels within the requested ranges.
        inputs_by_hpx = defaultdict(set)
        for patch_data_id, input_refs_for_patch in inputs_by_patch.items():
            patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
            for begin, end in patch_hpx_ranges & hpx_ranges:
                for hpx_index in range(begin, end):
                    inputs_by_hpx[hpx_index].update(input_refs_for_patch)

        # Build the quanta.
        output_run = metadata["output_run"]
        quanta = []
        for hpx_index, input_refs_for_hpx_index in inputs_by_hpx.items():
            # Group the input refs by band.
            input_refs_by_band = defaultdict(list)
            for input_ref in input_refs_for_hpx_index:
                input_refs_by_band[input_ref.dataId["band"]].append(input_ref)

            # Create a quantum for each band.
            for band, input_refs_for_band in input_refs_by_band.items():
                data_id = registry.expandDataId({hpx_dimension: hpx_index, "band": band})

                hpx_pixel_ranges = RangeSet(hpx_index)
                hpx_output_ranges = hpx_pixel_ranges.scaled(4**(config.hips_order - hpx_pixelization.level))
                output_data_ids = []
                for begin, end in hpx_output_ranges:
                    for hpx_output_index in range(begin, end):
                        output_data_ids.append(
                            registry.expandDataId({hpx_output_dimension: hpx_output_index, "band": band})
                        )
                outputs = {
                    dt: [DatasetRef(dt, data_id, run=output_run)]
                    for dt in incidental_output_dataset_types
                }
                outputs[output_dataset_type] = [DatasetRef(output_dataset_type, data_id, run=output_run)
                                                for data_id in output_data_ids]
                quanta.append(
                    Quantum(
                        taskName=task_def.taskName,
                        taskClass=task_def.taskClass,
                        dataId=data_id,
                        initInputs={},
                        inputs={input_dataset_type: input_refs_for_band},
                        outputs=outputs,
                    )
                )

        if len(quanta) == 0:
            raise RuntimeError("Given constraints yielded empty quantum graph.")

        # Define initOutput refs.
        empty_data_id = DataCoordinate.makeEmpty(registry.dimensions)
        init_outputs = {}
        global_init_outputs = []
        if config_dataset_type := dataset_types.initOutputs.get(task_def.configDatasetName):
            init_outputs[task_def] = [DatasetRef(config_dataset_type, empty_data_id, run=output_run)]
        packages_dataset_name = pipeBase.PipelineDatasetTypes.packagesDatasetName
        if packages_dataset_type := dataset_types.initOutputs.get(packages_dataset_name):
            global_init_outputs.append(DatasetRef(packages_dataset_type, empty_data_id, run=output_run))

        return pipeBase.QuantumGraph(
            quanta={task_def: quanta},
            initOutputs=init_outputs,
            globalInitOutputs=global_init_outputs,
            metadata=metadata,
        )


class HipsPropertiesSpectralTerm(pexConfig.Config):
    lambda_min = pexConfig.Field(
        doc="Minimum wavelength (nm)",
        dtype=float,
    )
    lambda_max = pexConfig.Field(
        doc="Maximum wavelength (nm)",
        dtype=float,
    )


class HipsPropertiesConfig(pexConfig.Config):
    """Configuration parameters for writing a HiPS properties file."""
    creator_did_template = pexConfig.Field(
        doc=("Unique identifier of the HiPS - Format: IVOID. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
    )
    obs_collection = pexConfig.Field(
        doc="Short name of original data set - Format: one word",
        dtype=str,
        optional=True,
    )
    obs_description_template = pexConfig.Field(
        doc=("Data set description - Format: free text, longer free text "
             "description of the dataset. Use ``{band}`` to substitute "
             "the band name."),
        dtype=str,
        optional=True,
    )
    prov_progenitor = pexConfig.ListField(
        doc="Provenance of the original data - Format: free text",
        dtype=str,
        default=[],
    )
    obs_title_template = pexConfig.Field(
        doc=("Data set title format: free text, but should be short. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
    )
    spectral_ranges = pexConfig.ConfigDictField(
        doc=("Mapping from band to lambda_min, lambda_max (nm). May be approximate."),
        keytype=str,
        itemtype=HipsPropertiesSpectralTerm,
        default={},
    )
    initial_ra = pexConfig.Field(
        doc="Initial RA (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_dec = pexConfig.Field(
        doc="Initial Declination (deg) (default for HiPS viewer). If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_fov = pexConfig.Field(
        doc="Initial field-of-view (deg). If not set will use ~1 healpix tile.",
        dtype=float,
        optional=True,
    )
    obs_ack = pexConfig.Field(
        doc="Observation acknowledgements (free text).",
        dtype=str,
        optional=True,
    )
    t_min = pexConfig.Field(
        doc="Time (MJD) of earliest observation included in HiPS",
        dtype=float,
        optional=True,
    )
    t_max = pexConfig.Field(
        doc="Time (MJD) of latest observation included in HiPS",
        dtype=float,
        optional=True,
    )

    def validate(self):
        super().validate()

        if self.obs_collection is not None:
            if re.search(r"\s", self.obs_collection):
                raise ValueError("obs_collection cannot contain any space characters.")

    def setDefaults(self):
        # Default spectral ranges (nm) for the ugrizy bands.
        u_term = HipsPropertiesSpectralTerm()
        u_term.lambda_min = 330.
        u_term.lambda_max = 400.
        self.spectral_ranges["u"] = u_term
        g_term = HipsPropertiesSpectralTerm()
        g_term.lambda_min = 402.
        g_term.lambda_max = 552.
        self.spectral_ranges["g"] = g_term
        r_term = HipsPropertiesSpectralTerm()
        r_term.lambda_min = 552.
        r_term.lambda_max = 691.
        self.spectral_ranges["r"] = r_term
        i_term = HipsPropertiesSpectralTerm()
        i_term.lambda_min = 691.
        i_term.lambda_max = 818.
        self.spectral_ranges["i"] = i_term
        z_term = HipsPropertiesSpectralTerm()
        z_term.lambda_min = 818.
        z_term.lambda_max = 922.
        self.spectral_ranges["z"] = z_term
        y_term = HipsPropertiesSpectralTerm()
        y_term.lambda_min = 970.
        y_term.lambda_max = 1060.
        self.spectral_ranges["y"] = y_term


class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "band"),
                              defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=GenerateHipsConnections):
    """Configuration parameters for GenerateHipsTask."""
    hips_base_uri = pexConfig.Field(
        doc="URI to HiPS base for output.",
        dtype=str,
    )
    min_order = pexConfig.Field(
        doc="Minimum healpix order for HiPS tree.",
        dtype=int,
    )
    properties = pexConfig.ConfigField(
        dtype=HipsPropertiesConfig,
        doc="Configuration for properties file.",
    )
    allsky_tilesize = pexConfig.Field(
        dtype=int,
        doc="Allsky.png tile size. Must be power of 2.",
    )
    png_gray_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
        dtype=float,
    )


class GenerateHipsTask(pipeBase.PipelineTask):
    """Task for making a HiPS tree with FITS and grayscale PNGs."""
    ConfigClass = GenerateHipsConfig
    _DefaultName = "generateHips"

    # Used by runQuantum to switch between grayscale and color processing;
    # GenerateColorHipsTask overrides this.
    color_task = False

    @timeMethod
    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        dims = inputRefs.hips_exposure_handles[0].dataId.names
        order = None
        for dim in dims:
            if "healpix" in dim:
                order = int(dim.split("healpix")[1])
                healpix_dim = dim
                break
        if order is None:
            raise RuntimeError("Could not determine healpix order for input exposures.")

        hips_exposure_handle_dict = {
            (hips_exposure_handle.dataId[healpix_dim],
             hips_exposure_handle.dataId["band"]): hips_exposure_handle
            for hips_exposure_handle in inputs["hips_exposure_handles"]
        }

        data_bands = {hips_exposure_handle.dataId["band"]
                      for hips_exposure_handle in inputs["hips_exposure_handles"]}
        bands = self._check_data_bands(data_bands)

        self.run(
            bands=bands,
            max_order=order,
            hips_exposure_handle_dict=hips_exposure_handle_dict,
            do_color=self.color_task,
        )

    def _check_data_bands(self, data_bands):
        """Check that the data has only a single band.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of single band to process.

        Raises
        ------
        RuntimeError
            Raised if there is not exactly one band.
        """
        if len(data_bands) != 1:
            raise RuntimeError("GenerateHipsTask can only use data from a single band.")

        return list(data_bands)

    @timeMethod
    def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
        """Run the GenerateHipsTask.

        Parameters
        ----------
        bands : `list` [ `str` ]
            List of bands to be processed (or single band).
        max_order : `int`
            HEALPix order of the maximum (native) HPX exposures.
        hips_exposure_handle_dict : `dict` {`int`: `lsst.daf.butler.DeferredDatasetHandle`}
            Dict of handles for the HiPS high-resolution exposures.
            Key is (pixel number, ``band``).
        do_color : `bool`, optional
            Do color pngs instead of per-band grayscale.
        """
        min_order = self.config.min_order

        # The grayscale mapping is always available; the color mapping is only
        # needed (and only configured) when making color pngs.
        png_grayscale_mapping = AsinhMapping(
            self.config.png_gray_asinh_minimum,
            self.config.png_gray_asinh_stretch,
            Q=self.config.png_gray_asinh_softening,
        )

        if do_color:
            png_color_mapping = AsinhMapping(
                self.config.png_color_asinh_minimum,
                self.config.png_color_asinh_stretch,
                Q=self.config.png_color_asinh_softening,
            )

            bcb = self.config.blue_channel_band
            gcb = self.config.green_channel_band
            rcb = self.config.red_channel_band
            colorstr = f"{bcb}{gcb}{rcb}"

        # The output tree is rooted at hips_base_uri.
        hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)

        # Unique-ify the pixels, which appear once per band; the result is a
        # sorted array.
        pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))

        # Append a sentinel pixel so that the last real pixel is flushed in the
        # loop below.
        pixels = np.append(pixels, [0])

        # Convert the pixels to their equivalents at each lower order.
        pixels_shifted = {}
        pixels_shifted[max_order] = pixels
        for order in range(max_order - 1, min_order - 1, -1):
            pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)
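
        # In HEALPix NEST ordering, right-shifting a pixel index by two bits
        # gives the index of its parent pixel at the next lower order, so each
        # entry of pixels_shifted holds the same pixels as seen at that order.
        # For example (illustrative), pixel 1733 at order 11 has parent
        # 1733 >> 2 = 433 at order 10.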
        # Set the sentinel pixel to -1 at every order so it never matches a
        # real pixel.
        for order in range(min_order, max_order + 1):
            pixels_shifted[order][-1] = -1

        # Read the first exposure to determine the image geometry.
        exp0 = list(hips_exposure_handle_dict.values())[0].get()
        bbox = exp0.getBBox()
        npix = bbox.getWidth()
        shift_order = int(np.round(np.log2(npix)))

        # Make blank exposures, one per band and order, filled with NaN.
        exposures = {}
        for band in bands:
            for order in range(min_order, max_order + 1):
                exp = exp0.Factory(bbox=bbox)
                exp.image.array[:, :] = np.nan
                exposures[(band, order)] = exp

        # Loop over the high-resolution pixels; the sentinel pixel at the end
        # is skipped, serving only to flush the final tiles.
        for pixel_counter, pixel in enumerate(pixels[:-1]):
            self.log.debug("Working on high resolution pixel %d", pixel)
            for band in bands:
                # Read the high-resolution exposure for this pixel/band if
                # there is one; otherwise the NaN-filled image is used.
                if (pixel, band) in hips_exposure_handle_dict:
                    exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()

            # Go up the HiPS tree.  A tile at a given order is written (and
            # binned into its parent) only once the next pixel no longer falls
            # in the same tile at that order.
            for order in range(max_order, min_order - 1, -1):
                if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
                    # This order (and all lower orders) are not finished yet.
                    break

                # Write out the images for this tile.
                if not do_color:
                    for band in bands:
                        self._write_hips_image(
                            hips_base_path.join(f"band_{band}", forceDirectory=True),
                            order,
                            pixels_shifted[order][pixel_counter],
                            exposures[(band, order)].image,
                            png_grayscale_mapping,
                            shift_order=shift_order,
                        )
                else:
                    # Make a color png.
                    self._write_hips_color_png(
                        hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                        order,
                        pixels_shifted[order][pixel_counter],
                        exposures[(self.config.red_channel_band, order)].image,
                        exposures[(self.config.green_channel_band, order)].image,
                        exposures[(self.config.blue_channel_band, order)].image,
                        png_color_mapping,
                    )

                log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
                self.log.log(
                    log_level,
                    "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
                    ",".join(bands),
                    order,
                    pixels_shifted[order][pixel_counter],
                    pixel_counter,
                    len(pixels) - 1,
                )

                # At the minimum order we are at the bottom of the tree; clear
                # the image(s) and move on to the next pixel.
                if order == min_order:
                    for band in bands:
                        exposures[(band, order)].image.array[:, :] = np.nan
                    continue

                # Bin the image down one HEALPix order to build up the parent
                # tile.
                for band in bands:
                    arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
                    with warnings.catch_warnings():
                        warnings.simplefilter("ignore")
                        binned_image_arr = np.nanmean(arr, axis=(1, 3))
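
                    # 2x2 block averaging: reshaping to (npix//2, 2, npix//2, 2)
                    # groups each 2x2 block of the tile, and nanmean over axes
                    # (1, 3) averages the finite values in each block, halving
                    # the tile resolution (one HEALPix order coarser).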
                    sub_index = (pixels_shifted[order][pixel_counter]
                                 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))
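
                    # In NEST ordering a parent pixel p at order - 1 has
                    # children 4*p + (0, 1, 2, 3) at this order, so sub_index
                    # is in the range 0-3 and selects the quadrant of the
                    # parent tile that this binned image fills below.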
                    # Fill the correct quadrant of the parent (order - 1) tile.
                    exp = exposures[(band, order - 1)]

                    if sub_index == 0:
                        exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
                    elif sub_index == 1:
                        exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
                    elif sub_index == 2:
                        exp.image.array[npix//2:, npix//2:] = binned_image_arr
                    elif sub_index == 3:
                        exp.image.array[0: npix//2, npix//2:] = binned_image_arr
                    else:
                        # This should be impossible.
                        raise ValueError("Illegal pixel sub index")

                    # Clear this level of the working exposure for the next
                    # pixel.
                    if order < max_order:
                        exposures[(band, order)].image.array[:, :] = np.nan

        # Write the properties files and MOCs.
        for band in bands:
            band_pixels = np.array([pixel
                                    for pixel, band_ in hips_exposure_handle_dict.keys()
                                    if band_ == band])
            band_pixels = np.sort(band_pixels)

            if not do_color:
                self._write_properties_and_moc(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    max_order,
                    band_pixels,
                    exp0,
                    shift_order,
                    band,
                    False,
                )
                self._write_allsky_file(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    min_order,
                )
        if do_color:
            self._write_properties_and_moc(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                max_order,
                band_pixels,
                exp0,
                shift_order,
                colorstr,
                True,
            )
            self._write_allsky_file(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                min_order,
            )

    def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
        """Write a HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image : `lsst.afw.image.Image`
            Image to write.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        shift_order : `int`, optional
            HPX shift order.
        """
        # HiPS tiles are stored in a directory tree of the form
        # Norder{order}/Dir{dir_number}/Npix{pixel}.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        wcs = makeHpxWcs(order, pixel, shift_order=shift_order)

        uri = hips_dir.join(f"Npix{pixel}.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

        # And write a grayscale png as well.
        vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)
        vals[~np.isfinite(image.array) | (image.array < 0)] = 0
        im = Image.fromarray(vals[::-1, :], "L")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_hips_color_png(self, hips_base_path, order, pixel,
                              image_red, image_green, image_blue, png_mapping):
        """Write a color png HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image_red : `lsst.afw.image.Image`
            Input for red channel of output png.
        image_green : `lsst.afw.image.Image`
            Input for green channel of output png.
        image_blue : `lsst.afw.image.Image`
            Input for blue channel of output png.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        """
        # HiPS tiles are stored in a directory tree of the form
        # Norder{order}/Dir{dir_number}/Npix{pixel}.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True
        )

        # Convert NaNs to the minimum value for each channel.
        arr_red = image_red.array.copy()
        arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
        arr_green = image_green.array.copy()
        arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
        arr_blue = image_blue.array.copy()
        arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]

        image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)

        im = Image.fromarray(image_array[::-1, :, :], mode="RGB")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_properties_and_moc(self, hips_base_path, max_order, pixels,
                                  exposure, shift_order, band, multiband):
        """Write HiPS properties file and MOC.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray` (N,)
            Array of pixels used.
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        shift_order : `int`
            HPX shift order.
        band : `str`
            Band (or color string) for the HiPS tree.
        multiband : `bool`
            Is band multiband / color?
        """
        area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)

        initial_ra = self.config.properties.initial_ra
        initial_dec = self.config.properties.initial_dec
        initial_fov = self.config.properties.initial_fov

        if initial_ra is None or initial_dec is None or initial_fov is None:
            # Fall back to a point in the coverage: take the median pixel.
            temp_pixels = pixels.copy()
            if temp_pixels.size % 2 == 0:
                temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
            medpix = int(np.median(temp_pixels))
            _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
            _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.

            if initial_ra is None or initial_dec is None:
                initial_ra = _initial_ra
                initial_dec = _initial_dec
            if initial_fov is None:
                initial_fov = _initial_fov

        self._write_hips_properties_file(
            hips_base_path,
            self.config.properties,
            band,
            multiband,
            exposure,
            max_order,
            shift_order,
            area,
            initial_ra,
            initial_dec,
            initial_fov,
        )

        # Write the MOC coverage.
        self._write_hips_moc_file(
            hips_base_path,
            max_order,
            pixels,
        )

    def _write_hips_properties_file(self, hips_base_path, properties_config, band, multiband,
                                    exposure, max_order, shift_order, area,
                                    initial_ra, initial_dec, initial_fov):
        """Write HiPS properties file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath at top of HiPS tree.  File will be written
            to this path as ``properties``.
        properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig`
            Configuration for properties values.
        band : `str`
            Name of band(s) for HiPS tree.
        multiband : `bool`
            Is multiband / color?
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        max_order : `int`
            Maximum HEALPix order.
        shift_order : `int`
            HPX shift order.
        area : `float`
            Coverage area in square degrees.
        initial_ra : `float`
            Initial HiPS RA position (degrees).
        initial_dec : `float`
            Initial HiPS Dec position (degrees).
        initial_fov : `float`
            Initial HiPS display size (degrees).
        """
        def _write_property(fh, name, value):
            """Write a property name/value to a file handle.

            Parameters
            ----------
            fh : file object
                File handle open for writing.
            name : `str`
                Name of the property.
            value : `str`
                Value of the property.
            """
            # The property name may not contain whitespace or "=", per the
            # HiPS standard.
            if re.search(r"\s", name):
                raise ValueError(f"``{name}`` cannot contain any space characters.")
            if "=" in name:
                raise ValueError(f"``{name}`` cannot contain an ``=``")

            fh.write(f"{name:25}= {value}\n")

        if exposure.image.array.dtype == np.dtype("float32"):
            bitpix = -32
        elif exposure.image.array.dtype == np.dtype("float64"):
            bitpix = -64
        elif exposure.image.array.dtype == np.dtype("int32"):
            bitpix = 32
        else:
            raise ValueError(f"Unsupported image dtype {exposure.image.array.dtype}")

        date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z"
        pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees')

        uri = hips_base_path.join("properties")
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            with open(temporary_uri.ospath, "w") as fh:
                _write_property(
                    fh,
                    "creator_did",
                    properties_config.creator_did_template.format(band=band),
                )
                if properties_config.obs_collection is not None:
                    _write_property(fh, "obs_collection", properties_config.obs_collection)
                _write_property(
                    fh,
                    "obs_title",
                    properties_config.obs_title_template.format(band=band),
                )
                if properties_config.obs_description_template is not None:
                    _write_property(
                        fh,
                        "obs_description",
                        properties_config.obs_description_template.format(band=band),
                    )
                if len(properties_config.prov_progenitor) > 0:
                    for prov_progenitor in properties_config.prov_progenitor:
                        _write_property(fh, "prov_progenitor", prov_progenitor)
                if properties_config.obs_ack is not None:
                    _write_property(fh, "obs_ack", properties_config.obs_ack)
                _write_property(fh, "obs_regime", "Optical")
                _write_property(fh, "data_pixel_bitpix", str(bitpix))
                _write_property(fh, "dataproduct_type", "image")
                _write_property(fh, "moc_sky_fraction", str(area/41253.))
                _write_property(fh, "data_ucd", "phot.flux")
                _write_property(fh, "hips_creation_date", date_iso8601)
                _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask")
                _write_property(fh, "hips_creator", "Vera C. Rubin Observatory")
                _write_property(fh, "hips_version", "1.4")
                _write_property(fh, "hips_release_date", date_iso8601)
                _write_property(fh, "hips_frame", "equatorial")
                _write_property(fh, "hips_order", str(max_order))
                _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth()))
                _write_property(fh, "hips_status", "private master clonableOnce")
                if multiband:
                    _write_property(fh, "hips_tile_format", "png")
                    _write_property(fh, "dataproduct_subtype", "color")
                else:
                    _write_property(fh, "hips_tile_format", "png fits")
                _write_property(fh, "hips_pixel_bitpix", str(bitpix))
                _write_property(fh, "hips_pixel_scale", str(pixel_scale))
                _write_property(fh, "hips_initial_ra", str(initial_ra))
                _write_property(fh, "hips_initial_dec", str(initial_dec))
                _write_property(fh, "hips_initial_fov", str(initial_fov))

                if multiband:
                    if self.config.blue_channel_band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[
                            self.config.blue_channel_band].lambda_min/1e9
                    else:
                        self.log.warning("blue band %s not in self.config.spectral_ranges.", band)
                        # Fall back to a nominal optical wavelength (assumed).
                        em_min = 3e-7
                    if self.config.red_channel_band in properties_config.spectral_ranges:
                        em_max = properties_config.spectral_ranges[
                            self.config.red_channel_band].lambda_max/1e9
                    else:
                        self.log.warning("red band %s not in self.config.spectral_ranges.", band)
                        # Fall back to a nominal optical wavelength (assumed).
                        em_max = 1e-6
                else:
                    if band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[band].lambda_min/1e9
                        em_max = properties_config.spectral_ranges[band].lambda_max/1e9
                    else:
                        self.log.warning("band %s not in self.config.spectral_ranges.", band)
                        # Fall back to a nominal optical range (assumed).
                        em_min = 3e-7
                        em_max = 1e-6
                _write_property(fh, "em_min", str(em_min))
                _write_property(fh, "em_max", str(em_max))
                if properties_config.t_min is not None:
                    _write_property(fh, "t_min", properties_config.t_min)
                if properties_config.t_max is not None:
                    _write_property(fh, "t_max", properties_config.t_max)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
        """Write HiPS MOC file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath to top of HiPS tree.  File will be written
            to this path as ``Moc.fits``.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray`
            Array of pixels covered.
        min_uniq_order : `int`, optional
            Minimum HEALPix order for looking for fully covered pixels.
        """
        # Make the initial list of UNIQ pixels.
        uniq = 4*(4**max_order) + pixels
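
        # The MOC "NUNIQ" encoding packs (order, ipix) into a single integer,
        # uniq = 4*4**order + ipix, so that both the order and the pixel index
        # can be recovered later.  For example (illustrative), pixel 10 at
        # order 3 becomes 4*64 + 10 = 266.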
        # Make a healsparse map to simplify degrading and coverage comparisons.
        hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
        hspmap[pixels] = 1.0

        # Work from fine to coarse orders, replacing groups of fully covered
        # pixels with a single coarser UNIQ pixel.
        for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
            hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
            pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
            # Check which pixels at uniq_order have full coverage.
            covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
            if covered.size == 0:
                # No pixels at this order are fully covered; we are done.
                break
            # Replace the UNIQ pixels that are fully covered.
            uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]

        # Remove duplicate pixels.
        uniq = np.unique(uniq)

        # Output to FITS.
        tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
        tbl["UNIQ"] = uniq

        order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
        moc_order = np.max(order)

        hdu = fits.BinTableHDU(tbl)
        hdu.header["PIXTYPE"] = "HEALPIX"
        hdu.header["ORDERING"] = "NUNIQ"
        hdu.header["COORDSYS"] = "C"
        hdu.header["MOCORDER"] = moc_order
        hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"

        uri = hips_base_path.join("Moc.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            hdu.writeto(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_allsky_file(self, hips_base_path, allsky_order):
        """Write an Allsky.png file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        allsky_order : `int`
            HEALPix order of the minimum order to make allsky file.
        """
        tile_size = self.config.allsky_tilesize
        n_tiles_per_side = int(np.sqrt(hpg.nside_to_npixel(hpg.order_to_nside(allsky_order))))
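
        # At allsky_order there are 12*4**allsky_order HEALPix tiles, and the
        # Allsky mosaic packs them into an approximately square grid of
        # int(sqrt(n_tiles)) tiles on a side, each shrunk to tile_size pixels.
        # For example (illustrative), allsky_order=3 gives 768 tiles on a
        # 27x27 grid.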
        # Find all the png tiles at this order, shrink them, and paste them
        # into the Allsky mosaic.
        allsky_image = None

        allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True)
        pixel_regex = re.compile(r"Npix([0-9]+)\.png$")
        png_uris = ResourcePath.findFileResources(
            candidates=[allsky_order_uri],
            file_filter=pixel_regex,
        )

        for png_uri in png_uris:
            matches = re.match(pixel_regex, png_uri.basename())
            pix_num = int(matches.group(1))
            tile_image = Image.open(io.BytesIO(png_uri.read()))
            row = math.floor(pix_num//n_tiles_per_side)
            column = pix_num % n_tiles_per_side
            box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size)
            tile_image_shrunk = tile_image.resize((tile_size, tile_size))

            if allsky_image is None:
                allsky_image = Image.new(
                    tile_image.mode,
                    (n_tiles_per_side*tile_size, n_tiles_per_side*tile_size),
                )
            allsky_image.paste(tile_image_shrunk, box)

        uri = allsky_order_uri.join("Allsky.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            allsky_image.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _get_dir_number(self, pixel):
        """Compute the directory number from a pixel.

        Parameters
        ----------
        pixel : `int`
            HEALPix pixel number.

        Returns
        -------
        dir_number : `int`
            HiPS directory number.
        """
        return (pixel//10000)*10000
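
    # Illustrative examples of the HiPS directory grouping implemented above:
    # pixel 123456 maps to Dir120000 and pixel 9999 maps to Dir0, since tiles
    # are grouped into directories of 10000 pixels each.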


class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument", ),
                                   defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateColorHipsConfig(GenerateHipsConfig,
                              pipelineConnections=GenerateColorHipsConnections):
    """Configuration parameters for GenerateColorHipsTask."""
    blue_channel_band = pexConfig.Field(
        doc="Band to use for blue channel of color pngs.",
        dtype=str,
    )
    green_channel_band = pexConfig.Field(
        doc="Band to use for green channel of color pngs.",
        dtype=str,
    )
    red_channel_band = pexConfig.Field(
        doc="Band to use for red channel of color pngs.",
        dtype=str,
    )
    png_color_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for color png scaling.",
        dtype=float,
    )
    png_color_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for color png scaling.",
        dtype=float,
    )
    png_color_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for color png scaling.",
        dtype=float,
    )


class GenerateColorHipsTask(GenerateHipsTask):
    """Task for making a HiPS tree with color pngs."""
    ConfigClass = GenerateColorHipsConfig
    _DefaultName = "generateColorHips"
    color_task = True

    def _check_data_bands(self, data_bands):
        """Check the data for configured bands.

        Warn if any color bands are missing data.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of bands in bgr color order.
        """
        if len(data_bands) == 0:
            raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")

        if self.config.blue_channel_band not in data_bands:
            self.log.warning(
                "Color png blue_channel_band %s not in dataset.",
                self.config.blue_channel_band
            )
        if self.config.green_channel_band not in data_bands:
            self.log.warning(
                "Color png green_channel_band %s not in dataset.",
                self.config.green_channel_band
            )
        if self.config.red_channel_band not in data_bands:
            self.log.warning(
                "Color png red_channel_band %s not in dataset.",
                self.config.red_channel_band
            )

        bands = [
            self.config.blue_channel_band,
            self.config.green_channel_band,
            self.config.red_channel_band,
        ]

        return bands