22"""Tasks for making and manipulating HIPS images."""
24__all__ = [
"HighResolutionHipsTask",
"HighResolutionHipsConfig",
"HighResolutionHipsConnections",
25 "HighResolutionHipsQuantumGraphBuilder",
26 "GenerateHipsTask",
"GenerateHipsConfig",
"GenerateColorHipsTask",
"GenerateColorHipsConfig"]

from collections import defaultdict
from datetime import datetime
import argparse
import io
import math
import re
import sys
import warnings

import numpy as np
import healsparse as hsp
import hpgeom as hpg
from astropy.io import fits
from astropy.visualization.lupton_rgb import AsinhMapping
from PIL import Image

import lsst.afw.geom as afwGeom
import lsst.afw.image as afwImage
import lsst.afw.math as afwMath
import lsst.geom as geom
import lsst.pex.config as pexConfig
import lsst.pipe.base as pipeBase
from lsst.afw.geom import makeHpxWcs
from lsst.daf.butler import Butler
from lsst.pipe.base.quantum_graph_builder import QuantumGraphBuilder
from lsst.pipe.base.quantum_graph_skeleton import QuantumGraphSkeleton, DatasetKey
from lsst.resources import ResourcePath
from lsst.sphgeom import HealpixPixelization, RangeSet
from lsst.utils.timer import timeMethod

from .healSparseMapping import _is_power_of_two


class HighResolutionHipsConnections(pipeBase.PipelineTaskConnections,
                                    dimensions=("healpix9", "band"),
                                    defaultTemplates={"coaddName": "deep"}):
    coadd_exposure_handles = pipeBase.connectionTypes.Input(
        doc="Coadded exposures to convert to HIPS format.",
        name="{coaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=("tract", "patch", "skymap", "band"),
        multiple=True,
        deferLoad=True,
    )
    hips_exposures = pipeBase.connectionTypes.Output(
        doc="HiPS-compatible HPX image.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)

        quantum_order = None
        for dim in self.dimensions:
            if dim.startswith("healpix"):
                if quantum_order is not None:
                    raise ValueError("Must not specify more than one quantum healpix dimension.")
                quantum_order = int(dim.split("healpix")[1])
        if quantum_order is None:
            raise ValueError("Must specify a healpix dimension in quantum dimensions.")

        if quantum_order > config.hips_order:
            raise ValueError("Quantum healpix dimension order must not be greater than hips_order")

        order = None
        for dim in self.hips_exposures.dimensions:
            if dim.startswith("healpix"):
                if order is not None:
                    raise ValueError("Must not specify more than one healpix dimension.")
                order = int(dim.split("healpix")[1])
        if order is None:
            raise ValueError("Must specify a healpix dimension in hips_exposure dimensions.")

        if order != config.hips_order:
            raise ValueError("healpix dimension order must match config.hips_order.")


class HighResolutionHipsConfig(pipeBase.PipelineTaskConfig,
                               pipelineConnections=HighResolutionHipsConnections):
    """Configuration parameters for HighResolutionHipsTask.

    Notes
    -----
    A HiPS image covers one HEALPix cell, with the HEALPix nside equal to
    2**hips_order. Each cell is 'shift_order' orders deeper than the HEALPix
    cell, with 2**shift_order x 2**shift_order sub-pixels on a side, which
    defines the target resolution of the HiPS image. The IVOA recommends
    shift_order=9, for 2**9=512 pixels on a side.

    The HiPS recommendation,
    https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf,
    shows the relationship between hips_order, number of tiles (full
    sky coverage), cell size, and sub-pixel size/image resolution (with
    the default shift_order=9):

    +------------+-----------------+--------------+------------------+
    | hips_order | Number of Tiles | Cell Size    | Image Resolution |
    +============+=================+==============+==================+
    | 0          | 12              | 58.63 deg    | 6.871 arcmin     |
    | 1          | 48              | 29.32 deg    | 3.435 arcmin     |
    | 2          | 192             | 14.66 deg    | 1.718 arcmin     |
    | 3          | 768             | 7.329 deg    | 51.53 arcsec     |
    | 4          | 3072            | 3.665 deg    | 25.77 arcsec     |
    | 5          | 12288           | 1.832 deg    | 12.88 arcsec     |
    | 6          | 49152           | 54.97 arcmin | 6.442 arcsec     |
    | 7          | 196608          | 27.48 arcmin | 3.221 arcsec     |
    | 8          | 786432          | 13.74 arcmin | 1.61 arcsec      |
    | 9          | 3145728         | 6.871 arcmin | 805.2 mas        |
    | 10         | 12582912        | 3.435 arcmin | 402.6 mas        |
    | 11         | 50331648        | 1.718 arcmin | 201.3 mas        |
    | 12         | 201326592       | 51.53 arcsec | 100.6 mas        |
    | 13         | 805306368       | 25.77 arcsec | 50.32 mas        |
    +------------+-----------------+--------------+------------------+
    """
    hips_order = pexConfig.Field(
        doc="HIPS image order.",
        dtype=int,
        default=11,
    )
    shift_order = pexConfig.Field(
        doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)",
        dtype=int,
        default=9,
    )
    warp = pexConfig.ConfigField(
        dtype=afwMath.Warper.ConfigClass,
        doc="Warper configuration",
    )

    def setDefaults(self):
        self.warp.warpingKernelName = "lanczos5"


class HipsTaskNameDescriptor:
    """Descriptor used to create a DefaultName that matches the order of
    the defined dimensions in the connections class.

    Parameters
    ----------
    prefix : `str`
        The prefix of the Default name, to which the healpix order will be
        appended.
    """
    def __init__(self, prefix):
        # Create a defaultName template.
        self._defaultName = f"{prefix}{{}}"
        self._order = None

    def __get__(self, obj, klass=None):
        if klass is None:
            raise RuntimeError(
                "HipsTaskDescriptor was used in an unexpected context"
            )
        if self._order is None:
            klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions
            for dim in klassDimensions:
                if (match := re.match(r"^healpix(\d*)$", dim)) is not None:
                    self._order = int(match.group(1))
                    break
            else:
                raise RuntimeError(
                    "Could not find healpix dimension in connections class"
                )
        return self._defaultName.format(self._order)
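
# Example (derived from the definitions above): HighResolutionHipsTask uses
# connections with quantum dimensions ("healpix9", "band"), so its
# descriptor-backed _DefaultName resolves to "highResolutionHips9".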


class HighResolutionHipsTask(pipeBase.PipelineTask):
    """Task for making high resolution HiPS images."""
    ConfigClass = HighResolutionHipsConfig
    _DefaultName = HipsTaskNameDescriptor("highResolutionHips")

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.warper = afwMath.Warper.fromConfig(self.config.warp)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        healpix_dim = f"healpix{self.config.hips_order}"

        pixels = [hips_exposure.dataId[healpix_dim]
                  for hips_exposure in outputRefs.hips_exposures]

        outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"])

        hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]: hips_exposure_ref
                                  for hips_exposure_ref in outputRefs.hips_exposures}
        for pixel, hips_exposure in outputs.hips_exposures.items():
            butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel])

    def run(self, pixels, coadd_exposure_handles):
        """Run the HighResolutionHipsTask.

        Parameters
        ----------
        pixels : `Iterable` [ `int` ]
            Iterable of healpix pixels (nest ordering) to warp to.
        coadd_exposure_handles : `list` [`lsst.daf.butler.DeferredDatasetHandle`]
            Handles for the coadd exposures.

        Returns
        -------
        outputs : `lsst.pipe.base.Struct`
            ``hips_exposures`` is a dict with pixel (key) and hips_exposure (value).
        """
        self.log.info("Generating HPX images for %d pixels at order %d", len(pixels), self.config.hips_order)

        npix = 2**self.config.shift_order
        bbox_hpx = geom.Box2I(geom.Point2I(0, 0), geom.Extent2I(npix, npix))

        # Set up the output exposures and warp lists, keyed by output pixel.
        exp_hpx_dict = {}
        warp_dict = {}
        for pixel in pixels:
            wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order)
            exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx)
            exp_hpx_dict[pixel] = exp_hpx
            warp_dict[pixel] = []

        first_handle = True
        # Loop over the input coadd exposures, warping each into every
        # overlapping output pixel.
        for handle in coadd_exposure_handles:
            coadd_exp = handle.get()

            for pixel in pixels:
                warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx)

                exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs())
                exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

                if first_handle:
                    # Make sure the mask planes, filter, and photoCalib of the
                    # output exposure match the input coadd.
                    exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict())
                    exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter())
                    exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib())

                if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)):
                    # There is no overlap; skip this pixel/coadd combination.
                    self.log.debug(
                        "No overlap between output HPX %d and input exposure %s",
                        pixel,
                        handle.dataId,
                    )
                    continue

                exp.maskedImage.assign(warped.maskedImage, warped.getBBox())
                warp_dict[pixel].append(exp.maskedImage)

            first_handle = False

        stats_flags = afwMath.stringToStatisticsProperty("MEAN")
        stats_ctrl = afwMath.StatisticsControl()
        stats_ctrl.setNanSafe(True)
        stats_ctrl.setWeighted(True)
        stats_ctrl.setCalcErrorFromInputVariance(True)

        # Stack the warps for each output pixel.
        for pixel in pixels:
            exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan)

            if not warp_dict[pixel]:
                # There is no data for this pixel; remove it from the output.
                self.log.debug("No data in HPX pixel %d", pixel)
                exp_hpx_dict.pop(pixel)
                continue

            exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack(
                warp_dict[pixel],
                stats_flags,
                stats_ctrl,
                [1.0]*len(warp_dict[pixel]),
                clipped=0,
                maskMap=[],
            )

        return pipeBase.Struct(hips_exposures=exp_hpx_dict)

    @classmethod
    def build_quantum_graph_cli(cls, argv):
        """A command-line interface entry point to `build_quantum_graph`.

        This method provides the implementation for the
        ``build-high-resolution-hips-qg`` script.

        Parameters
        ----------
        argv : `Sequence` [ `str` ]
            Command-line arguments (e.g. ``sys.argv[1:]``).
        """
        parser = cls._make_cli_parser()

        args = parser.parse_args(argv)

        if args.subparser_name is None:
            parser.print_help()
            sys.exit(1)

        pipeline = pipeBase.Pipeline.from_uri(args.pipeline)
        pipeline_graph = pipeline.to_graph()

        if len(pipeline_graph.tasks) != 1:
            raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.")

        (task_node,) = pipeline_graph.tasks.values()

        butler = Butler(args.butler_config, collections=args.input)

        if args.subparser_name == "segment":
            # Do the segmentation.
            hpix_pixelization = HealpixPixelization(level=args.hpix_build_order)
            dataset = task_node.inputs["coadd_exposure_handles"].dataset_type_name
            data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded())
            region_pixels = []
            for data_id in data_ids:
                region = data_id.region
                pixel_range = hpix_pixelization.envelope(region)
                for r in pixel_range.ranges():
                    region_pixels.extend(range(r[0], r[1]))
            indices = np.unique(region_pixels)

            print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:")
            for pixel in indices:
                print(pixel)
        elif args.subparser_name == "build":
            # Build the quantum graph.

            # Figure out collection names.
            if args.output_run is None:
                if args.output is None:
                    raise ValueError("At least one of --output or --output-run options is required.")
                args.output_run = "{}/{}".format(args.output, pipeBase.Instrument.makeCollectionTimestamp())

            build_ranges = RangeSet(sorted(args.pixels))

            # Metadata to attach to the quantum graph for provenance.
            metadata = {
                "input": args.input,
                "butler_argument": args.butler_config,
                "output": args.output,
                "output_run": args.output_run,
                "data_query": args.where,
                "time": f"{datetime.now()}",
            }

            builder = HighResolutionHipsQuantumGraphBuilder(
                pipeline_graph,
                butler,
                input_collections=args.input,
                output_run=args.output_run,
                constraint_order=args.hpix_build_order,
                constraint_ranges=build_ranges,
                where=args.where,
            )
            qg = builder.build(metadata, attach_datastore_records=True)
            qg.saveUri(args.save_qgraph)
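
    # Illustrative workflow (a sketch, not an exact command line; the parser
    # built in _make_cli_parser is the authoritative source for the flags):
    #   1. Run the "segment" subcommand with --hpix_build_order to print the
    #      HEALPix pixels, at that order, that overlap existing coadds.
    #   2. Run the "build" subcommand with a subset of those pixel indices
    #      (args.pixels), an output RUN collection (args.output_run, or one
    #      derived from args.output), and a QuantumGraph output file
    #      (args.save_qgraph); then submit the saved graph for execution.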

    @classmethod
    def _make_cli_parser(cls):
        """Make the command-line parser.

        Returns
        -------
        parser : `argparse.ArgumentParser`
        """
        parser = argparse.ArgumentParser(
            description=(
                "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets."
            ),
        )
        subparsers = parser.add_subparsers(help="sub-command help", dest="subparser_name")

        parser_segment = subparsers.add_parser("segment",
                                               help="Determine survey segments for workflow.")
        parser_build = subparsers.add_parser("build",
                                             help="Build quantum graph for HighResolutionHipsTask")

        for sub in [parser_segment, parser_build]:
            # Arguments shared by the "segment" and "build" subcommands.
            sub.add_argument(
                "--butler-config",
                type=str,
                help="Path to data repository or butler configuration.",
                required=True,
            )
            sub.add_argument(
                "--pipeline",
                type=str,
                help="Pipeline file, limited to one task.",
                required=True,
            )
            sub.add_argument(
                "--input",
                type=str,
                nargs="+",
                help="Input collection(s) to search for coadd exposures.",
                required=True,
            )
            sub.add_argument(
                "--hpix_build_order",
                type=int,
                default=1,
                help="HEALPix order to segment sky for building quantum graph files.",
            )
            sub.add_argument(
                "--where",
                type=str,
                default=None,
                help="Data ID expression used when querying for input coadd datasets.",
            )

        parser_build.add_argument(
            "--output",
            type=str,
            help=(
                "Name of the output CHAINED collection. If this option is specified and "
                "--output-run is not, then a new RUN collection will be created by appending "
                "a timestamp to the value of this option."
            ),
            default=None,
        )
        parser_build.add_argument(
            "--output-run",
            type=str,
            help=(
                "Output RUN collection to write resulting images. If not provided "
                "then --output must be provided and a new RUN collection will be created "
                "by appending a timestamp to the value passed with --output."
            ),
            default=None,
        )
        parser_build.add_argument(
            "--save-qgraph",
            type=str,
            help="Output filename for QuantumGraph.",
            required=True,
        )
        parser_build.add_argument(
            "--pixels",
            type=int,
            nargs="+",
            help="Pixels at --hpix_build_order to generate quantum graph.",
            required=True,
        )

        return parser


class HighResolutionHipsQuantumGraphBuilder(QuantumGraphBuilder):
    """A custom `lsst.pipe.base.QuantumGraphBuilder` for running
    `HighResolutionHipsTask` only.

    This is a temporary workaround for incomplete butler query support for
    HEALPix dimensions.

    Parameters
    ----------
    pipeline_graph : `lsst.pipe.base.PipelineGraph`
        Pipeline graph with exactly one task, which must be a configuration
        of `HighResolutionHipsTask`.
    butler : `lsst.daf.butler.Butler`
        Client for the butler data repository.  May be read-only.
    input_collections : `str` or `Iterable` [ `str` ], optional
        Collection or collections to search for input datasets, in order.
        If not provided, ``butler.collections`` will be searched.
    output_run : `str`, optional
        Name of the output collection.  If not provided, ``butler.run`` will
        be used.
    constraint_order : `int`
        HEALPix order used to constrain which quanta are generated, via
        ``constraint_indices``.  This should be a coarser grid (smaller
        order) than the order used for the task's quantum and output data
        IDs, and ideally something between the spatial scale of a patch and
        the data repository's "common skypix" system (usually ``htm7``).
    constraint_ranges : `lsst.sphgeom.RangeSet`
        RangeSet that describes constraint pixels (HEALPix NEST, with order
        ``constraint_order``) to constrain generated quanta.
    where : `str`, optional
        A boolean `str` expression of the form accepted by
        `Registry.queryDatasets` to constrain input datasets.  This may
        contain a constraint on tracts, patches, or bands, but not HEALPix
        indices.  Constraints on tracts and patches should usually be
        unnecessary, however - existing coadds that overlap the given
        HEALPix indices will be selected without such a constraint, and
        providing one may reject some that should normally be included.
    """

    def __init__(
        self,
        pipeline_graph,
        butler,
        *,
        input_collections=None,
        output_run=None,
        constraint_order,
        constraint_ranges,
        where="",
    ):
        super().__init__(pipeline_graph, butler, input_collections=input_collections, output_run=output_run)
        self.constraint_order = constraint_order
        self.constraint_ranges = constraint_ranges
        self.where = where

    def process_subgraph(self, subgraph):
        # Docstring inherited.
        (task_node,) = subgraph.tasks.values()

        # Since we know this is the only task in the pipeline, we know there
        # is only one overall input and one regular output.
        (input_dataset_type_node,) = subgraph.inputs_of(task_node.label).values()
        assert input_dataset_type_node is not None, "PipelineGraph should be resolved by base class."
        (output_edge,) = task_node.outputs.values()
        output_dataset_type_node = subgraph.dataset_types[output_edge.parent_dataset_type_name]
        (hpx_output_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d]
            for d in output_dataset_type_node.dimensions.skypix.names
        )
        constraint_hpx_pixelization = (
            self.butler.dimensions.skypix_dimensions[f"healpix{self.constraint_order}"].pixelization
        )
        common_skypix_name = self.butler.dimensions.commonSkyPix.name
        common_skypix_pixelization = self.butler.dimensions.commonSkyPix.pixelization

        # Identify the healpix dimension used for the task's quanta.
        (hpx_dimension,) = (
            self.butler.dimensions.skypix_dimensions[d]
            for d in task_node.dimensions.names
            if d != "band"
        )
        hpx_pixelization = hpx_dimension.pixelization
        if hpx_pixelization.level < self.constraint_order:
            raise ValueError(f"Quantum order {hpx_pixelization.level} must be < {self.constraint_order}")
        hpx_ranges = self.constraint_ranges.scaled(4**(hpx_pixelization.level - self.constraint_order))

        # We can be generous in looking for pixels here, because we constrain
        # by actual patch regions below.
        common_skypix_ranges = RangeSet()
        for begin, end in self.constraint_ranges:
            for hpx_index in range(begin, end):
                constraint_hpx_region = constraint_hpx_pixelization.pixel(hpx_index)
                common_skypix_ranges |= common_skypix_pixelization.envelope(constraint_hpx_region)

        # Simplify the ranges until there are fewer than 100 of them, to keep
        # the registry query manageable.
        for simp in range(1, 10):
            if len(common_skypix_ranges) < 100:
                break
            common_skypix_ranges.simplify(simp)

        # Use that RangeSet to assemble a WHERE constraint expression.
        where_terms = []
        bind = {}
        for n, (begin, end) in enumerate(common_skypix_ranges):
            stop = end - 1  # registry range syntax is inclusive
            if begin == stop:
                where_terms.append(f"{common_skypix_name} = cpx{n}")
                bind[f"cpx{n}"] = begin
            else:
                where_terms.append(f"({common_skypix_name} >= cpx{n}a AND {common_skypix_name} <= cpx{n}b)")
                bind[f"cpx{n}a"] = begin
                bind[f"cpx{n}b"] = stop
        where = " OR ".join(where_terms)
        if self.where:
            where = f"({self.where}) AND ({' OR '.join(where_terms)})"

        # Query for input datasets with this constraint, asking for expanded
        # data IDs because we need the patch regions, and group the results
        # by patch.
        input_refs = self.butler.registry.queryDatasets(
            input_dataset_type_node.dataset_type,
            where=where,
            findFirst=True,
            collections=self.input_collections,
            bind=bind,
        ).expanded()
        inputs_by_patch = defaultdict(set)
        patch_dimensions = self.butler.dimensions.conform(["patch"])
        for input_ref in input_refs:
            dataset_key = DatasetKey(input_ref.datasetType.name, input_ref.dataId.required_values)
            self.existing_datasets.inputs[dataset_key] = input_ref
            inputs_by_patch[input_ref.dataId.subset(patch_dimensions)].add(dataset_key)
        if not inputs_by_patch:
            message_body = "\n".join(input_refs.explain_no_results())
            raise RuntimeError(f"No inputs found:\n{message_body}")

        # Iterate over patches and compute the set of output healpix pixels
        # that overlap each patch, restricted to the pixels we were asked for.
        inputs_by_hpx = defaultdict(set)
        for patch_data_id, input_keys_for_patch in inputs_by_patch.items():
            patch_hpx_ranges = hpx_pixelization.envelope(patch_data_id.region)
            for begin, end in patch_hpx_ranges & hpx_ranges:
                for hpx_index in range(begin, end):
                    inputs_by_hpx[hpx_index].update(input_keys_for_patch)

        # Iterate over the dict we just created and create the quanta.
        skeleton = QuantumGraphSkeleton([task_node.label])
        for hpx_index, input_keys_for_hpx_index in inputs_by_hpx.items():
            # Group inputs by band.
            input_keys_by_band = defaultdict(list)
            for input_key in input_keys_for_hpx_index:
                input_ref = self.existing_datasets.inputs[input_key]
                input_keys_by_band[input_ref.dataId["band"]].append(input_key)

            # Iterate over bands to make quanta.
            for band, input_keys_for_band in input_keys_by_band.items():
                data_id = self.butler.registry.expandDataId({hpx_dimension.name: hpx_index, "band": band})
                quantum_key = skeleton.add_quantum_node(task_node.label, data_id)
                # Add inputs to the skeleton.
                skeleton.add_input_edges(quantum_key, input_keys_for_band)
                # Add the regular output datasets at the (finer) output order.
                hpx_pixel_ranges = RangeSet(hpx_index)
                hpx_output_ranges = hpx_pixel_ranges.scaled(
                    4**(task_node.config.hips_order - hpx_pixelization.level)
                )
                for begin, end in hpx_output_ranges:
                    for hpx_output_index in range(begin, end):
                        dataset_key = skeleton.add_dataset_node(
                            output_dataset_type_node.name,
                            self.butler.registry.expandDataId(
                                {hpx_output_dimension: hpx_output_index, "band": band}
                            ),
                        )
                        skeleton.add_output_edge(quantum_key, dataset_key)
                # Add the auxiliary outputs (e.g. log, metadata).
                for write_edge in task_node.iter_all_outputs():
                    if write_edge.connection_name == output_edge.connection_name:
                        continue
                    dataset_key = skeleton.add_dataset_node(write_edge.parent_dataset_type_name, data_id)
                    skeleton.add_output_edge(quantum_key, dataset_key)
        return skeleton
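
# Illustrative (not part of this module): programmatic use of the builder,
# assuming a resolved single-task pipeline graph and a butler client.  The
# collection names below are placeholders.
#
#     builder = HighResolutionHipsQuantumGraphBuilder(
#         pipeline_graph,
#         butler,
#         input_collections=["my/input/collection"],
#         output_run="u/someone/hips_run",
#         constraint_order=5,
#         constraint_ranges=RangeSet(sorted([1000, 1001])),
#     )
#     qg = builder.build({}, attach_datastore_records=True)
#
# Each constraint pixel at order 5 expands to 4**(quantum_order - 5) candidate
# quanta at the task's healpix quantum order, because every HEALPix order
# subdivides a pixel into four children.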


class HipsPropertiesSpectralTerm(pexConfig.Config):
    lambda_min = pexConfig.Field(
        doc="Minimum wavelength (nm)",
        dtype=float,
    )
    lambda_max = pexConfig.Field(
        doc="Maximum wavelength (nm)",
        dtype=float,
    )


class HipsPropertiesConfig(pexConfig.Config):
    """Configuration parameters for writing a HiPS properties file."""
    creator_did_template = pexConfig.Field(
        doc=("Unique identifier of the HiPS - Format: IVOID. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
        optional=False,
    )
    obs_collection = pexConfig.Field(
        doc="Short name of original data set - Format: one word",
        dtype=str,
        optional=True,
    )
    obs_description_template = pexConfig.Field(
        doc=("Data set description - Format: free text, longer free text "
             "description of the dataset.  Use ``{band}`` to substitute "
             "the band name."),
        dtype=str,
        optional=True,
    )
    prov_progenitor = pexConfig.ListField(
        doc="Provenance of the original data - Format: free text",
        dtype=str,
        default=[],
    )
    obs_title_template = pexConfig.Field(
        doc=("Data set title format: free text, but should be short. "
             "Use ``{band}`` to substitute the band name."),
        dtype=str,
        optional=False,
    )
    spectral_ranges = pexConfig.ConfigDictField(
        doc="Mapping from band to lambda_min, lambda_max (nm).  May be approximate.",
        keytype=str,
        itemtype=HipsPropertiesSpectralTerm,
        default={},
    )
    initial_ra = pexConfig.Field(
        doc="Initial RA (deg) (default for HiPS viewer).  If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_dec = pexConfig.Field(
        doc="Initial Declination (deg) (default for HiPS viewer).  If not set will use a point in MOC.",
        dtype=float,
        optional=True,
    )
    initial_fov = pexConfig.Field(
        doc="Initial field-of-view (deg).  If not set will use ~1 healpix tile.",
        dtype=float,
        optional=True,
    )
    obs_ack = pexConfig.Field(
        doc="Observation acknowledgements (free text).",
        dtype=str,
        optional=True,
    )
    t_min = pexConfig.Field(
        doc="Time (MJD) of earliest observation included in HiPS",
        dtype=float,
        optional=True,
    )
    t_max = pexConfig.Field(
        doc="Time (MJD) of latest observation included in HiPS",
        dtype=float,
        optional=True,
    )

    def validate(self):
        super().validate()

        if self.obs_collection is not None:
            if re.search(r"\s", self.obs_collection):
                raise ValueError("obs_collection cannot contain any space characters.")

    def setDefaults(self):
        u_term = HipsPropertiesSpectralTerm()
        u_term.lambda_min = 330.
        u_term.lambda_max = 400.
        self.spectral_ranges["u"] = u_term
        g_term = HipsPropertiesSpectralTerm()
        g_term.lambda_min = 402.
        g_term.lambda_max = 552.
        self.spectral_ranges["g"] = g_term
        r_term = HipsPropertiesSpectralTerm()
        r_term.lambda_min = 552.
        r_term.lambda_max = 691.
        self.spectral_ranges["r"] = r_term
        i_term = HipsPropertiesSpectralTerm()
        i_term.lambda_min = 691.
        i_term.lambda_max = 818.
        self.spectral_ranges["i"] = i_term
        z_term = HipsPropertiesSpectralTerm()
        z_term.lambda_min = 818.
        z_term.lambda_max = 922.
        self.spectral_ranges["z"] = z_term
        y_term = HipsPropertiesSpectralTerm()
        y_term.lambda_min = 970.
        y_term.lambda_max = 1060.
        self.spectral_ranges["y"] = y_term


class GenerateHipsConnections(pipeBase.PipelineTaskConnections,
                              dimensions=("instrument", "band"),
                              defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateHipsConfig(pipeBase.PipelineTaskConfig,
                         pipelineConnections=GenerateHipsConnections):
    """Configuration parameters for GenerateHipsTask."""
    hips_base_uri = pexConfig.Field(
        doc="URI to HiPS base for output.",
        dtype=str,
    )
    min_order = pexConfig.Field(
        doc="Minimum healpix order for HiPS tree.",
        dtype=int,
    )
    properties = pexConfig.ConfigField(
        dtype=HipsPropertiesConfig,
        doc="Configuration for properties file.",
    )
    allsky_tilesize = pexConfig.Field(
        dtype=int,
        doc="Allsky tile size; must be power of 2. HiPS standard recommends 64x64 tiles.",
        check=_is_power_of_two,
    )
    png_gray_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for grayscale png scaling.",
        dtype=float,
    )
    png_gray_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for grayscale png scaling.",
        dtype=float,
    )


class GenerateHipsTask(pipeBase.PipelineTask):
    """Task for making a HiPS tree with FITS and grayscale PNGs."""
    ConfigClass = GenerateHipsConfig
    _DefaultName = "generateHips"

    color_task = False

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        dims = inputRefs.hips_exposure_handles[0].dataId.dimensions.names
        order = None
        for dim in dims:
            if dim.startswith("healpix"):
                order = int(dim.split("healpix")[1])
                healpix_dim = dim
                break
        if order is None:
            raise RuntimeError("Could not determine healpix order for input exposures.")

        hips_exposure_handle_dict = {
            (hips_exposure_handle.dataId[healpix_dim],
             hips_exposure_handle.dataId["band"]): hips_exposure_handle
            for hips_exposure_handle in inputs["hips_exposure_handles"]
        }

        data_bands = {hips_exposure_handle.dataId["band"]
                      for hips_exposure_handle in inputs["hips_exposure_handles"]}
        bands = self._check_data_bands(data_bands)

        self.run(
            bands=bands,
            max_order=order,
            hips_exposure_handle_dict=hips_exposure_handle_dict,
            do_color=self.color_task,
        )

    def _check_data_bands(self, data_bands):
        """Check that the data has only a single band.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of single band to process.

        Raises
        ------
        RuntimeError
            Raised if there is not exactly one band.
        """
        if len(data_bands) != 1:
            raise RuntimeError("GenerateHipsTask can only use data from a single band.")

        return list(data_bands)

    def run(self, bands, max_order, hips_exposure_handle_dict, do_color=False):
        """Run the GenerateHipsTask.

        Parameters
        ----------
        bands : `list` [ `str` ]
            List of bands to be processed (or single band).
        max_order : `int`
            HEALPix order of the maximum (native) HPX exposures.
        hips_exposure_handle_dict : `dict` {`int`: `lsst.daf.butler.DeferredDatasetHandle`}
            Dict of handles for the HiPS high-resolution exposures.
            Key is (pixel number, ``band``).
        do_color : `bool`, optional
            Do color pngs instead of per-band grayscale.
        """
        min_order = self.config.min_order

        if not do_color:
            png_grayscale_mapping = AsinhMapping(
                self.config.png_gray_asinh_minimum,
                self.config.png_gray_asinh_stretch,
                Q=self.config.png_gray_asinh_softening,
            )
        else:
            png_color_mapping = AsinhMapping(
                self.config.png_color_asinh_minimum,
                self.config.png_color_asinh_stretch,
                Q=self.config.png_color_asinh_softening,
            )

            bcb = self.config.blue_channel_band
            gcb = self.config.green_channel_band
            rcb = self.config.red_channel_band
            colorstr = f"{bcb}{gcb}{rcb}"

        # The base path is given by the hips_base_uri configuration.
        hips_base_path = ResourcePath(self.config.hips_base_uri, forceDirectory=True)

        # Unique-ify the pixels, which appear once per band; the result is sorted.
        pixels = np.unique(np.array([pixel for pixel, _ in hips_exposure_handle_dict.keys()]))

        # Append a sentinel pixel so the last real pixel triggers the write
        # logic below.
        pixels = np.append(pixels, [0])

        # Convert the pixels to each order that will be generated.
        pixels_shifted = {}
        pixels_shifted[max_order] = pixels
        for order in range(max_order - 1, min_order - 1, -1):
            pixels_shifted[order] = np.right_shift(pixels_shifted[order + 1], 2)

        # Set the sentinel pixel to an illegal value at every order.
        for order in range(min_order, max_order + 1):
            pixels_shifted[order][-1] = -1

        # Read in the first pixel to determine the image geometry.
        exp0 = list(hips_exposure_handle_dict.values())[0].get()
        bbox = exp0.getBBox()
        npix = bbox.getWidth()
        shift_order = int(np.round(np.log2(npix)))

        # Create blank exposures for each order, one per band.
        exposures = {}
        for band in bands:
            for order in range(min_order, max_order + 1):
                exp = exp0.Factory(bbox=bbox)
                exp.image.array[:, :] = np.nan
                exposures[(band, order)] = exp

        # Loop over the high-resolution pixels, skipping the sentinel at the end.
        for pixel_counter, pixel in enumerate(pixels[:-1]):
            self.log.debug("Working on high resolution pixel %d", pixel)
            for band in bands:
                # Read in the high-resolution exposure for this pixel/band.
                if (pixel, band) in hips_exposure_handle_dict:
                    exposures[(band, max_order)] = hips_exposure_handle_dict[(pixel, band)].get()

            # Go up the HiPS tree; a given order is only written (and its
            # parent filled) once all of its children have been accumulated.
            for order in range(max_order, min_order - 1, -1):
                if pixels_shifted[order][pixel_counter + 1] == pixels_shifted[order][pixel_counter]:
                    # This order is not done, and so none of the coarser orders will be.
                    break

                # Write out the images for this order.
                if not do_color:
                    for band in bands:
                        self._write_hips_image(
                            hips_base_path.join(f"band_{band}", forceDirectory=True),
                            order,
                            pixels_shifted[order][pixel_counter],
                            exposures[(band, order)].image,
                            png_grayscale_mapping,
                            shift_order=shift_order,
                        )
                else:
                    # Make a color png.
                    self._write_hips_color_png(
                        hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                        order,
                        pixels_shifted[order][pixel_counter],
                        exposures[(self.config.red_channel_band, order)].image,
                        exposures[(self.config.green_channel_band, order)].image,
                        exposures[(self.config.blue_channel_band, order)].image,
                        png_color_mapping,
                    )

                log_level = self.log.INFO if order == (max_order - 3) else self.log.DEBUG
                self.log.log(
                    log_level,
                    "Completed HiPS generation for %s, order %d, pixel %d (%d/%d)",
                    ",".join(bands),
                    order,
                    pixels_shifted[order][pixel_counter],
                    pixel_counter,
                    len(pixels) - 1,
                )

                # At the top of the tree, erase the images and continue.
                if order == min_order:
                    for band in bands:
                        exposures[(band, order)].image.array[:, :] = np.nan
                    continue

                # Bin the image for each band and fill the parent exposure.
                for band in bands:
                    arr = exposures[(band, order)].image.array.reshape(npix//2, 2, npix//2, 2)
                    with warnings.catch_warnings():
                        warnings.simplefilter("ignore")
                        binned_image_arr = np.nanmean(arr, axis=(1, 3))

                    # Fill the parent (order - 1) image in the appropriate quadrant.
                    sub_index = (pixels_shifted[order][pixel_counter]
                                 - np.left_shift(pixels_shifted[order - 1][pixel_counter], 2))

                    exp = exposures[(band, order - 1)]

                    # These are the four quadrants of the parent image.
                    if sub_index == 0:
                        exp.image.array[npix//2:, 0: npix//2] = binned_image_arr
                    elif sub_index == 1:
                        exp.image.array[0: npix//2, 0: npix//2] = binned_image_arr
                    elif sub_index == 2:
                        exp.image.array[npix//2:, npix//2:] = binned_image_arr
                    elif sub_index == 3:
                        exp.image.array[0: npix//2, npix//2:] = binned_image_arr
                    else:
                        # This should be impossible.
                        raise ValueError("Illegal pixel sub index")

                    # Erase this order's image so it can be re-used.
                    if order < max_order:
                        exposures[(band, order)].image.array[:, :] = np.nan
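
        # Note on the block above (illustrative): the reshape to
        # (npix//2, 2, npix//2, 2) followed by nanmean over axes (1, 3)
        # performs 2x2 block averaging, and sub_index (the child pixel number
        # modulo 4 in NEST ordering) selects which quadrant of the parent
        # tile at order - 1 receives the binned child image.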

        # Write the properties files and MOCs.
        for band in bands:
            band_pixels = np.array([pixel
                                    for pixel, band_ in hips_exposure_handle_dict.keys()
                                    if band_ == band])
            band_pixels = np.sort(band_pixels)

            if not do_color:
                self._write_properties_and_moc(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    max_order,
                    band_pixels,
                    exp0,
                    shift_order,
                    band,
                    False,
                )
                self._write_allsky_file(
                    hips_base_path.join(f"band_{band}", forceDirectory=True),
                    min_order,
                )

        if do_color:
            self._write_properties_and_moc(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                max_order,
                pixels[:-1],
                exp0,
                shift_order,
                colorstr,
                True,
            )
            self._write_allsky_file(
                hips_base_path.join(f"color_{colorstr}", forceDirectory=True),
                min_order,
            )

    def _write_hips_image(self, hips_base_path, order, pixel, image, png_mapping, shift_order=9):
        """Write a HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image : `lsst.afw.image.Image`
            Image to write.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        shift_order : `int`, optional
            HPX shift order.
        """
        # Make the output directory.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True,
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True,
        )

        wcs = makeHpxWcs(order, pixel, shift_order=shift_order)

        uri = hips_dir.join(f"Npix{pixel}.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            image.writeFits(temporary_uri.ospath, metadata=wcs.getFitsMetadata())

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

        # And make a grayscale png as well.
        with np.errstate(invalid="ignore"):
            vals = 255 - png_mapping.map_intensity_to_uint8(image.array).astype(np.uint8)

        vals[~np.isfinite(image.array) | (image.array < 0)] = 0
        im = Image.fromarray(vals[::-1, :], "L")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_hips_color_png(
        self,
        hips_base_path,
        order,
        pixel,
        image_red,
        image_green,
        image_blue,
        png_mapping,
    ):
        """Write a color png HiPS image.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        order : `int`
            HEALPix order of the HiPS image to write.
        pixel : `int`
            HEALPix pixel of the HiPS image.
        image_red : `lsst.afw.image.Image`
            Input for red channel of output png.
        image_green : `lsst.afw.image.Image`
            Input for green channel of output png.
        image_blue : `lsst.afw.image.Image`
            Input for blue channel of output png.
        png_mapping : `astropy.visualization.lupton_rgb.AsinhMapping`
            Mapping to convert image to scaled png.
        """
        # Make the output directory.
        dir_number = self._get_dir_number(pixel)
        hips_dir = hips_base_path.join(
            f"Norder{order}",
            forceDirectory=True,
        ).join(
            f"Dir{dir_number}",
            forceDirectory=True,
        )

        # Convert nans to the minimum values in the mapping.
        arr_red = image_red.array.copy()
        arr_red[np.isnan(arr_red)] = png_mapping.minimum[0]
        arr_green = image_green.array.copy()
        arr_green[np.isnan(arr_green)] = png_mapping.minimum[1]
        arr_blue = image_blue.array.copy()
        arr_blue[np.isnan(arr_blue)] = png_mapping.minimum[2]

        image_array = png_mapping.make_rgb_image(arr_red, arr_green, arr_blue)

        im = Image.fromarray(image_array[::-1, :, :], mode="RGB")

        uri = hips_dir.join(f"Npix{pixel}.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            im.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_properties_and_moc(
        self,
        hips_base_path,
        max_order,
        pixels,
        exposure,
        shift_order,
        band,
        multiband,
    ):
        """Write HiPS properties file and MOC.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray` (N,)
            Array of pixels used.
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        shift_order : `int`
            HPX shift order.
        band : `str`
            Band (or color string).
        multiband : `bool`
            Is band multiband / color?
        """
        area = hpg.nside_to_pixel_area(2**max_order, degrees=True)*len(pixels)

        initial_ra = self.config.properties.initial_ra
        initial_dec = self.config.properties.initial_dec
        initial_fov = self.config.properties.initial_fov

        if initial_ra is None or initial_dec is None or initial_fov is None:
            # We want to point to an arbitrary pixel in the footprint;
            # the median pixel is a simple choice.
            temp_pixels = pixels.copy()
            if temp_pixels.size % 2 == 0:
                temp_pixels = np.append(temp_pixels, [temp_pixels[0]])
            medpix = int(np.median(temp_pixels))
            _initial_ra, _initial_dec = hpg.pixel_to_angle(2**max_order, medpix)
            _initial_fov = hpg.nside_to_resolution(2**max_order, units='arcminutes')/60.

            if initial_ra is None or initial_dec is None:
                initial_ra = _initial_ra
                initial_dec = _initial_dec
            if initial_fov is None:
                initial_fov = _initial_fov

        self._write_hips_properties_file(
            hips_base_path,
            self.config.properties,
            band,
            multiband,
            exposure,
            max_order,
            shift_order,
            area,
            initial_ra,
            initial_dec,
            initial_fov,
        )

        # Write the MOC coverage.
        self._write_hips_moc_file(
            hips_base_path,
            max_order,
            pixels,
        )

    def _write_hips_properties_file(
        self,
        hips_base_path,
        properties_config,
        band,
        multiband,
        exposure,
        max_order,
        shift_order,
        area,
        initial_ra,
        initial_dec,
        initial_fov,
    ):
        """Write HiPS properties file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath at top of HiPS tree.  File will be written
            to this path as ``properties``.
        properties_config : `lsst.pipe.tasks.hips.HipsPropertiesConfig`
            Configuration for properties values.
        band : `str`
            Name of band(s) for HiPS tree.
        multiband : `bool`
            Is multiband / color?
        exposure : `lsst.afw.image.Exposure`
            Sample HPX exposure used for generating HiPS tiles.
        max_order : `int`
            Maximum HEALPix order.
        shift_order : `int`
            HPX shift order.
        area : `float`
            Coverage area in square degrees.
        initial_ra : `float`
            Initial HiPS RA position (degrees).
        initial_dec : `float`
            Initial HiPS Dec position (degrees).
        initial_fov : `float`
            Initial HiPS display size (degrees).
        """
        def _write_property(fh, name, value):
            """Write a property name/value to a file handle.

            Parameters
            ----------
            fh : file handle
                File handle to write to.
            name : `str`
                Name of property.
            value : `str`
                Value of property.
            """
            # The HiPS standard does not allow spaces or "=" in property names.
            if re.search(r"\s", name):
                raise ValueError(f"``{name}`` cannot contain any space characters.")
            if "=" in name:
                raise ValueError(f"``{name}`` cannot contain an ``=``")

            fh.write(f"{name:25}= {value}\n")

        if exposure.image.array.dtype == np.dtype("float32"):
            bitpix = -32
        elif exposure.image.array.dtype == np.dtype("float64"):
            bitpix = -64
        elif exposure.image.array.dtype == np.dtype("int32"):
            bitpix = 32

        date_iso8601 = datetime.utcnow().isoformat(timespec="seconds") + "Z"
        pixel_scale = hpg.nside_to_resolution(2**(max_order + shift_order), units='degrees')

        uri = hips_base_path.join("properties")
        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            with open(temporary_uri.ospath, "w") as fh:
                _write_property(
                    fh,
                    "creator_did",
                    properties_config.creator_did_template.format(band=band),
                )
                if properties_config.obs_collection is not None:
                    _write_property(fh, "obs_collection", properties_config.obs_collection)
                _write_property(
                    fh,
                    "obs_title",
                    properties_config.obs_title_template.format(band=band),
                )
                if properties_config.obs_description_template is not None:
                    _write_property(
                        fh,
                        "obs_description",
                        properties_config.obs_description_template.format(band=band),
                    )
                if len(properties_config.prov_progenitor) > 0:
                    for prov_progenitor in properties_config.prov_progenitor:
                        _write_property(fh, "prov_progenitor", prov_progenitor)
                if properties_config.obs_ack is not None:
                    _write_property(fh, "obs_ack", properties_config.obs_ack)
                _write_property(fh, "obs_regime", "Optical")
                _write_property(fh, "data_pixel_bitpix", str(bitpix))
                _write_property(fh, "dataproduct_type", "image")
                _write_property(fh, "moc_sky_fraction", str(area/41253.))
                _write_property(fh, "data_ucd", "phot.flux")
                _write_property(fh, "hips_creation_date", date_iso8601)
                _write_property(fh, "hips_builder", "lsst.pipe.tasks.hips.GenerateHipsTask")
                _write_property(fh, "hips_creator", "Vera C. Rubin Observatory")
                _write_property(fh, "hips_version", "1.4")
                _write_property(fh, "hips_release_date", date_iso8601)
                _write_property(fh, "hips_frame", "equatorial")
                _write_property(fh, "hips_order", str(max_order))
                _write_property(fh, "hips_tile_width", str(exposure.getBBox().getWidth()))
                _write_property(fh, "hips_status", "private master clonableOnce")
                if multiband:
                    _write_property(fh, "hips_tile_format", "png")
                    _write_property(fh, "dataproduct_subtype", "color")
                else:
                    _write_property(fh, "hips_tile_format", "png fits")
                _write_property(fh, "hips_pixel_bitpix", str(bitpix))
                _write_property(fh, "hips_pixel_scale", str(pixel_scale))
                _write_property(fh, "hips_initial_ra", str(initial_ra))
                _write_property(fh, "hips_initial_dec", str(initial_dec))
                _write_property(fh, "hips_initial_fov", str(initial_fov))
                if multiband:
                    if self.config.blue_channel_band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[
                            self.config.blue_channel_band].lambda_min/1e9
                    else:
                        self.log.warning("blue band %s not in self.config.spectral_ranges.", band)
                        em_min = 3e-7
                    if self.config.red_channel_band in properties_config.spectral_ranges:
                        em_max = properties_config.spectral_ranges[
                            self.config.red_channel_band].lambda_max/1e9
                    else:
                        self.log.warning("red band %s not in self.config.spectral_ranges.", band)
                        em_max = 1e-6
                else:
                    if band in properties_config.spectral_ranges:
                        em_min = properties_config.spectral_ranges[band].lambda_min/1e9
                        em_max = properties_config.spectral_ranges[band].lambda_max/1e9
                    else:
                        self.log.warning("band %s not in self.config.spectral_ranges.", band)
                        em_min = 3e-7
                        em_max = 1e-6
                _write_property(fh, "em_min", str(em_min))
                _write_property(fh, "em_max", str(em_max))
                if properties_config.t_min is not None:
                    _write_property(fh, "t_min", properties_config.t_min)
                if properties_config.t_max is not None:
                    _write_property(fh, "t_max", properties_config.t_max)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _write_hips_moc_file(self, hips_base_path, max_order, pixels, min_uniq_order=1):
        """Write HiPS MOC file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            ResourcePath to top of HiPS tree.  File will be written
            to this path as ``Moc.fits``.
        max_order : `int`
            Maximum HEALPix order.
        pixels : `np.ndarray`
            Array of pixels covered.
        min_uniq_order : `int`, optional
            Minimum HEALPix order for looking for fully covered pixels.
        """
        # Make the initial list of UNIQ pixels.
        uniq = 4*(4**max_order) + pixels

        # Make a healsparse map for easy degrade/comparison operations.
        hspmap = hsp.HealSparseMap.make_empty(2**min_uniq_order, 2**max_order, dtype=np.float32)
        hspmap[pixels] = 1.0

        # Loop over orders, degrading each time, and replace runs of pixels
        # that fully cover a coarser pixel with that coarser pixel.
        for uniq_order in range(max_order - 1, min_uniq_order - 1, -1):
            hspmap = hspmap.degrade(2**uniq_order, reduction="sum")
            pix_shift = np.right_shift(pixels, 2*(max_order - uniq_order))
            # Check if any of the pixels at uniq_order have full coverage.
            covered, = np.isclose(hspmap[pix_shift], 4**(max_order - uniq_order)).nonzero()
            if covered.size == 0:
                # No pixels at this order are fully covered; we're done.
                break
            # Replace the UNIQ pixels that are fully covered.
            uniq[covered] = 4*(4**uniq_order) + pix_shift[covered]

        # Remove duplicate pixels.
        uniq = np.unique(uniq)

        # Output to fits.
        tbl = np.zeros(uniq.size, dtype=[("UNIQ", "i8")])
        tbl["UNIQ"] = uniq

        order = np.log2(tbl["UNIQ"]//4).astype(np.int32)//2
        moc_order = np.max(order)

        hdu = fits.BinTableHDU(tbl)
        hdu.header["PIXTYPE"] = "HEALPIX"
        hdu.header["ORDERING"] = "NUNIQ"
        hdu.header["COORDSYS"] = "C"
        hdu.header["MOCORDER"] = moc_order
        hdu.header["MOCTOOL"] = "lsst.pipe.tasks.hips.GenerateHipsTask"

        uri = hips_base_path.join("Moc.fits")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            hdu.writeto(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)
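
    # Note on the NUNIQ encoding above (illustrative): a MOC stores each
    # (order, ipix) pair as UNIQ = 4*4**order + ipix.  For example, with
    # max_order=11, pixel 0 encodes to 4*4**11 = 16777216, while its order-10
    # parent (also pixel 0) encodes to 4*4**10 = 4194304.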

    def _write_allsky_file(self, hips_base_path, allsky_order):
        """Write an Allsky.png file.

        Parameters
        ----------
        hips_base_path : `lsst.resources.ResourcePath`
            Resource path to the base of the HiPS directory tree.
        allsky_order : `int`
            HEALPix order of the minimum order to make allsky file.
        """
        tile_size = self.config.allsky_tilesize

        # Per the HiPS standard, the Allsky file is a grid of (possibly
        # down-sampled) tiles at this order, laid out left to right, with the
        # grid width equal to the floor of the square root of the tile count.
        n_tiles = hpg.nside_to_npixel(hpg.order_to_nside(allsky_order))
        n_tiles_wide = int(np.floor(np.sqrt(n_tiles)))
        n_tiles_high = int(np.ceil(n_tiles / n_tiles_wide))

        allsky_image = None

        allsky_order_uri = hips_base_path.join(f"Norder{allsky_order}", forceDirectory=True)
        pixel_regex = re.compile(r"Npix([0-9]+)\.png$")
        png_uris = list(
            ResourcePath.findFileResources(
                candidates=[allsky_order_uri],
                file_filter=pixel_regex,
            )
        )

        for png_uri in png_uris:
            matches = re.match(pixel_regex, png_uri.basename())
            pix_num = int(matches.group(1))
            tile_image = Image.open(io.BytesIO(png_uri.read()))
            row = math.floor(pix_num//n_tiles_wide)
            column = pix_num % n_tiles_wide
            box = (column*tile_size, row*tile_size, (column + 1)*tile_size, (row + 1)*tile_size)
            tile_image_shrunk = tile_image.resize((tile_size, tile_size))

            if allsky_image is None:
                allsky_image = Image.new(
                    tile_image.mode,
                    (n_tiles_wide*tile_size, n_tiles_high*tile_size),
                )
            allsky_image.paste(tile_image_shrunk, box)

        uri = allsky_order_uri.join("Allsky.png")

        with ResourcePath.temporary_uri(suffix=uri.getExtension()) as temporary_uri:
            allsky_image.save(temporary_uri.ospath)

            uri.transfer_from(temporary_uri, transfer="copy", overwrite=True)

    def _get_dir_number(self, pixel):
        """Compute the directory number from a pixel.

        Parameters
        ----------
        pixel : `int`
            HEALPix pixel number.

        Returns
        -------
        dir_number : `int`
            HiPS directory number.
        """
        return (pixel//10000)*10000
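
    # Example (illustrative): pixel 87345 maps to directory number 80000, so
    # its tiles are written as Norder<order>/Dir80000/Npix87345.png (and
    # .fits), matching the directory layout used by the writers above.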


class GenerateColorHipsConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument",),
                                   defaultTemplates={"coaddName": "deep"}):
    hips_exposure_handles = pipeBase.connectionTypes.Input(
        doc="HiPS-compatible HPX images.",
        name="{coaddName}Coadd_hpx",
        storageClass="ExposureF",
        dimensions=("healpix11", "band"),
        multiple=True,
        deferLoad=True,
    )


class GenerateColorHipsConfig(GenerateHipsConfig,
                              pipelineConnections=GenerateColorHipsConnections):
    """Configuration parameters for GenerateColorHipsTask."""
    blue_channel_band = pexConfig.Field(
        doc="Band to use for blue channel of color pngs.",
        dtype=str,
    )
    green_channel_band = pexConfig.Field(
        doc="Band to use for green channel of color pngs.",
        dtype=str,
    )
    red_channel_band = pexConfig.Field(
        doc="Band to use for red channel of color pngs.",
        dtype=str,
    )
    png_color_asinh_minimum = pexConfig.Field(
        doc="AsinhMapping intensity to be mapped to black for color png scaling.",
        dtype=float,
    )
    png_color_asinh_stretch = pexConfig.Field(
        doc="AsinhMapping linear stretch for color png scaling.",
        dtype=float,
    )
    png_color_asinh_softening = pexConfig.Field(
        doc="AsinhMapping softening parameter (Q) for color png scaling.",
        dtype=float,
    )


class GenerateColorHipsTask(GenerateHipsTask):
    """Task for making a HiPS tree with color pngs."""
    ConfigClass = GenerateColorHipsConfig
    _DefaultName = "generateColorHips"

    color_task = True

    def _check_data_bands(self, data_bands):
        """Check the data for configured bands.

        Warn if any color bands are missing data.

        Parameters
        ----------
        data_bands : `set` [`str`]
            Bands from the input data.

        Returns
        -------
        bands : `list` [`str`]
            List of bands in bgr color order.
        """
        if len(data_bands) == 0:
            raise RuntimeError("GenerateColorHipsTask must have data from at least one band.")

        if self.config.blue_channel_band not in data_bands:
            self.log.warning(
                "Color png blue_channel_band %s not in dataset.",
                self.config.blue_channel_band,
            )
        if self.config.green_channel_band not in data_bands:
            self.log.warning(
                "Color png green_channel_band %s not in dataset.",
                self.config.green_channel_band,
            )
        if self.config.red_channel_band not in data_bands:
            self.log.warning(
                "Color png red_channel_band %s not in dataset.",
                self.config.red_channel_band,
            )

        bands = [
            self.config.blue_channel_band,
            self.config.green_channel_band,
            self.config.red_channel_band,
        ]

        return bands