lsst.pipe.tasks g8b9e2231ea+fa07cb600e
Tasks for making and manipulating HIPS images.

Classes
    HighResolutionHipsConnections
    HighResolutionHipsConfig
    HipsTaskNameDescriptor
    HighResolutionHipsTask

The HiPS tiling scheme is described in the IVOA HiPS 1.0 Recommendation:
https://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf
Each HiPS tile is 2**shift_order pixels on a side (512 pixels for the
default shift_order of 9); hips_order sets the HEALPix order of the tiles.
self.log.info("Generating HIPS images for %d pixels at order %d", len(pixels), self.config.hips_order) npix = 2**self.config.shift_order bbox_hpx = geom.Box2I(corner=geom.Point2I(0, 0), dimensions=geom.Extent2I(npix, npix)) # For each healpix pixel we will create an empty exposure with the # correct HPX WCS. We furthermore create a dict to hold each of # the warps that will go into each HPX exposure. exp_hpx_dict = {} warp_dict = {} for pixel in pixels: wcs_hpx = afwGeom.makeHpxWcs(self.config.hips_order, pixel, shift_order=self.config.shift_order) exp_hpx = afwImage.ExposureF(bbox_hpx, wcs_hpx) exp_hpx_dict[pixel] = exp_hpx warp_dict[pixel] = [] first_handle = True # Loop over input coadd exposures to minimize i/o (this speeds things # up by ~8x to batch together pixels that overlap a given coadd). for handle in coadd_exposure_handles: coadd_exp = handle.get() # For each pixel, warp the coadd to the HPX WCS for the pixel. for pixel in pixels: warped = self.warper.warpExposure(exp_hpx_dict[pixel].getWcs(), coadd_exp, maxBBox=bbox_hpx) exp = afwImage.ExposureF(exp_hpx_dict[pixel].getBBox(), exp_hpx_dict[pixel].getWcs()) exp.maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan) if first_handle: # Make sure the mask planes, filter, and photocalib of the output # exposure match the (first) input exposure. exp_hpx_dict[pixel].mask.conformMaskPlanes(coadd_exp.mask.getMaskPlaneDict()) exp_hpx_dict[pixel].setFilter(coadd_exp.getFilter()) exp_hpx_dict[pixel].setPhotoCalib(coadd_exp.getPhotoCalib()) if warped.getBBox().getArea() == 0 or not np.any(np.isfinite(warped.image.array)): # There is no overlap, skip. self.log.debug( "No overlap between output HPX %d and input exposure %s", pixel, handle.dataId ) continue exp.maskedImage.assign(warped.maskedImage, warped.getBBox()) warp_dict[pixel].append(exp.maskedImage) first_handle = False stats_flags = afwMath.stringToStatisticsProperty('MEAN') stats_ctrl = afwMath.StatisticsControl() stats_ctrl.setNanSafe(True) stats_ctrl.setWeighted(True) stats_ctrl.setCalcErrorFromInputVariance(True) # Loop over pixels and combine the warps for each pixel. # The combination is done with a simple mean for pixels that # overlap in neighboring patches. for pixel in pixels: exp_hpx_dict[pixel].maskedImage.set(np.nan, afwImage.Mask.getPlaneBitMask("NO_DATA"), np.nan) if not warp_dict[pixel]: # Nothing in this pixel self.log.debug("No data in HPX pixel %d", pixel) # Remove the pixel from the output, no need to persist an # empty exposure. exp_hpx_dict.pop(pixel) continue exp_hpx_dict[pixel].maskedImage = afwMath.statisticsStack( warp_dict[pixel], stats_flags, stats_ctrl, [1.0]*len(warp_dict[pixel]), clipped=0, maskMap=[] ) return pipeBase.Struct(hips_exposures=exp_hpx_dict) @classmethod def build_quantum_graph_cli(cls, argv):
lsst lsst.pipe.tasks.hips.constraint_ranges : `lsst.sphgeom.RangeSet` |
www lsst.pipe.tasks.hips.https ://www.ivoa.net/documents/HiPS/20170519/REC-HIPS-1.0-20170519.pdf |
argparse lsst.pipe.tasks.hips.parser : `argparse.ArgumentParser` |
parser = cls._make_cli_parser() args = parser.parse_args(argv) if args.subparser_name is None: parser.print_help() sys.exit(1) pipeline = pipeBase.Pipeline.from_uri(args.pipeline) expanded_pipeline = list(pipeline.toExpandedPipeline()) if len(expanded_pipeline) != 1: raise RuntimeError(f"Pipeline file {args.pipeline} may only contain one task.") (task_def,) = expanded_pipeline butler = Butler(args.butler_config, collections=args.input) if args.subparser_name == 'segment': # Do the segmentation hpix_pixelization = HealpixPixelization(level=args.hpix_build_order) dataset = task_def.connections.coadd_exposure_handles.name data_ids = set(butler.registry.queryDataIds("tract", datasets=dataset).expanded()) region_pixels = [] for data_id in data_ids: region = data_id.region pixel_range = hpix_pixelization.envelope(region) for r in pixel_range.ranges(): region_pixels.extend(range(r[0], r[1])) indices = np.unique(region_pixels) print(f"Pixels to run at HEALPix order --hpix_build_order {args.hpix_build_order}:") for pixel in indices: print(pixel) elif args.subparser_name == 'build': # Build the quantum graph. build_ranges = RangeSet(sorted(args.pixels)) qg = cls.build_quantum_graph( task_def, butler.registry, args.hpix_build_order, build_ranges, where=args.where, collections=args.input ) qg.saveUri(args.save_qgraph) @classmethod def _make_cli_parser(cls):
Iterable lsst.pipe.tasks.hips.pixels : `Iterable` [ `int` ] |
def __init__(self, prefix): # create a defaultName template self._defaultName = f"{prefix}{{}}" self._order = None def __get__(self, obj, klass=None): if klass is None: raise RuntimeError( "HipsTaskDescriptor was used in an unexpected context" ) if self._order is None: klassDimensions = klass.ConfigClass.ConnectionsClass.dimensions for dim in klassDimensions: if (match := re.match(r"^healpix(\d*)$", dim)) is not None: self._order = int(match.group(1)) break else: raise RuntimeError( "Could not find healpix dimension in connections class" ) return self._defaultName.format(self._order) class HighResolutionHipsTask(pipeBase.PipelineTask):
ConfigClass = HighResolutionHipsConfig _DefaultName = HipsTaskNameDescriptor("highResolutionHips") def __init__(self, **kwargs): super().__init__(**kwargs) self.warper = afwMath.Warper.fromConfig(self.config.warp) @timeMethod def runQuantum(self, butlerQC, inputRefs, outputRefs): inputs = butlerQC.get(inputRefs) healpix_dim = f"healpix{self.config.hips_order}" pixels = [hips_exposure.dataId[healpix_dim] for hips_exposure in outputRefs.hips_exposures] outputs = self.run(pixels=pixels, coadd_exposure_handles=inputs["coadd_exposure_handles"]) hips_exposure_ref_dict = {hips_exposure_ref.dataId[healpix_dim]: hips_exposure_ref for hips_exposure_ref in outputRefs.hips_exposures} for pixel, hips_exposure in outputs.hips_exposures.items(): butlerQC.put(hips_exposure, hips_exposure_ref_dict[pixel]) def run(self, pixels, coadd_exposure_handles):
hips_order = pexConfig.Field( doc="HIPS image order.", dtype=int, default=11, ) shift_order = pexConfig.Field( doc="HIPS shift order (such that each tile is 2**shift_order pixels on a side)", dtype=int, default=9, ) warp = pexConfig.ConfigField( dtype=afwMath.Warper.ConfigClass, doc="Warper configuration", ) def setDefaults(self): self.warp.warpingKernelName = "lanczos5" class HipsTaskNameDescriptor:
lsst lsst.pipe.tasks.hips.registry : `lsst.daf.butler.Registry` |
lsst lsst.pipe.tasks.hips.task_def : `lsst.pipe.base.TaskDef` |
parser = argparse.ArgumentParser( description=( "Build a QuantumGraph that runs HighResolutionHipsTask on existing coadd datasets." ), ) subparsers = parser.add_subparsers(help='sub-command help', dest='subparser_name') parser_segment = subparsers.add_parser('segment', help='Determine survey segments for workflow.') parser_build = subparsers.add_parser('build', help='Build quantum graph for HighResolutionHipsTask') for sub in [parser_segment, parser_build]: # These arguments are in common. sub.add_argument( "-b", "--butler-config", type=str, help="Path to data repository or butler configuration.", required=True, ) sub.add_argument( "-p", "--pipeline", type=str, help="Pipeline file, limited to one task.", required=True, ) sub.add_argument( "-i", "--input", type=str, nargs="+", help="Input collection(s) to search for coadd exposures.", required=True, ) sub.add_argument( "-o", "--hpix_build_order", type=int, default=1, help="HEALPix order to segment sky for building quantum graph files.", ) sub.add_argument( "-w", "--where", type=str, default=None, help="Data ID expression used when querying for input coadd datasets.", ) parser_build.add_argument( "-q", "--save-qgraph", type=str, help="Output filename for QuantumGraph.", required=True, ) parser_build.add_argument( "-P", "--pixels", type=int, nargs="+", help="Pixels at --hpix_build_order to generate quantum graph.", required=True, ) return parser @classmethod def build_quantum_graph( cls, task_def, registry, constraint_order, constraint_ranges, where=None, collections=None, ):
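
The default task name produced by HipsTaskNameDescriptor comes from the healpix
dimension declared on the connections class, not from the config field directly.
A minimal, stack-free sketch of that logic; the dimensions tuple below is an
assumption, chosen to match the default hips_order of 11:

import re

# Assumed connections dimensions for the default configuration (hips_order=11).
dimensions = ("healpix11", "band")

prefix = "highResolutionHips"
for dim in dimensions:
    if (match := re.match(r"^healpix(\d*)$", dim)) is not None:
        order = int(match.group(1))
        break

# Mirrors HipsTaskNameDescriptor.__get__: the prefix with the healpix order appended.
print(f"{prefix}{order}")  # -> highResolutionHips11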
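
build_quantum_graph_cli and _make_cli_parser define a two-step command-line
workflow: 'segment' reports which HEALPix pixels at --hpix_build_order overlap
the input coadds, and 'build' writes a QuantumGraph for a chosen subset of those
pixels. A sketch of a driver script; the repository path, pipeline file, and
collection names are placeholders, and the LSST stack ships its own entry point
for this, so treat the snippet only as an illustration of the CLI defined above:

import sys

from lsst.pipe.tasks.hips import HighResolutionHipsTask

if __name__ == "__main__":
    # Example invocations (all paths and collections are placeholders):
    #   driver.py segment -b /repo/main -p hips.yaml -i HSC/runs/coadds -o 1
    #   driver.py build -b /repo/main -p hips.yaml -i HSC/runs/coadds -o 1 \
    #       -P 11 12 -q hips.qgraph
    HighResolutionHipsTask.build_quantum_graph_cli(sys.argv[1:])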
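
Outside of QuantumGraph execution, run() can also be driven directly with
deferred dataset handles. A minimal sketch, assuming a butler repository at
/repo/main, an input collection HSC/runs/coadds, a coadd dataset type named
deepCoadd_calexp, and two HEALPix pixel indices at the configured hips_order;
all of these names and values are placeholders:

from lsst.daf.butler import Butler
from lsst.pipe.tasks.hips import HighResolutionHipsConfig, HighResolutionHipsTask

# Placeholder repository and collection names.
butler = Butler("/repo/main", collections=["HSC/runs/coadds"])

# Deferred handles for the input coadds; the dataset type name is an assumption.
handles = [
    butler.getDeferred(ref)
    for ref in butler.registry.queryDatasets("deepCoadd_calexp", where="band = 'i'")
]

config = HighResolutionHipsConfig()  # hips_order=11, shift_order=9 by default
task = HighResolutionHipsTask(config=config)

# Placeholder HEALPix pixel indices at order config.hips_order.
struct = task.run(pixels=[100000, 100001], coadd_exposure_handles=handles)
for pixel, exposure in struct.hips_exposures.items():
    exposure.writeFits(f"hips_{pixel}.fits")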