25import lsst.coadd.utils
as coaddUtils
27from lsst.obs.base
import ExposureIdInfo
29from .references
import MultiBandReferencesTask
30from .forcedMeasurement
import ForcedMeasurementTask
31from .applyApCorr
import ApplyApCorrTask
32from .catalogCalculation
import CatalogCalculationTask
# Public API of this module.
__all__ = ("ForcedPhotCoaddConfig", "ForcedPhotCoaddTask")
class ForcedPhotCoaddRunner(pipeBase.ButlerInitializedTaskRunner):
    """Get the psfCache setting into ForcedPhotCoaddTask"""

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        # Forward the command-line --psfCache value to every task invocation
        # in addition to whatever keyword arguments the base runner supplies.
        return pipeBase.ButlerInitializedTaskRunner.getTargetList(parsedCmd,
                                                                  psfCache=parsedCmd.psfCache)
class ForcedPhotCoaddConnections(pipeBase.PipelineTaskConnections,
                                 dimensions=("band", "skymap", "tract", "patch"),
                                 defaultTemplates={"inputCoaddName": "deep",
                                                   "outputCoaddName": "deep"}):
    inputSchema = pipeBase.connectionTypes.InitInput(
        doc="Schema for the input measurement catalogs.",
        name="{inputCoaddName}Coadd_ref_schema",
        storageClass="SourceCatalog",
    )
    outputSchema = pipeBase.connectionTypes.InitOutput(
        doc="Schema for the output forced measurement catalogs.",
        name="{outputCoaddName}Coadd_forced_src_schema",
        storageClass="SourceCatalog",
    )
    exposure = pipeBase.connectionTypes.Input(
        doc="Input exposure to perform photometry on.",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=["band", "skymap", "tract", "patch"],
    )
    refCat = pipeBase.connectionTypes.Input(
        doc="Catalog of shapes and positions at which to force photometry.",
        name="{inputCoaddName}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
    refCatInBand = pipeBase.connectionTypes.Input(
        doc="Catalog of shapes and positions in the band having forced photometry done",
        name="{inputCoaddName}Coadd_meas",
        storageClass="SourceCatalog",
        dimensions=("band", "skymap", "tract", "patch")
    )
    footprintCatInBand = pipeBase.connectionTypes.Input(
        doc="Catalog of footprints to attach to sources",
        name="{inputCoaddName}Coadd_deblendedFlux",
        storageClass="SourceCatalog",
        dimensions=("band", "skymap", "tract", "patch")
    )
    scarletModels = pipeBase.connectionTypes.Input(
        doc="Multiband scarlet models produced by the deblender",
        name="{inputCoaddName}Coadd_scarletModelData",
        storageClass="ScarletModelData",
        dimensions=("tract", "patch", "skymap"),
    )
    refWcs = pipeBase.connectionTypes.Input(
        doc="Reference world coordinate system.",
        name="{inputCoaddName}Coadd.wcs",
        storageClass="Wcs",
        dimensions=["band", "skymap", "tract", "patch"],
    )
    measCat = pipeBase.connectionTypes.Output(
        doc="Output forced photometry catalog.",
        name="{outputCoaddName}Coadd_forced_src",
        storageClass="SourceCatalog",
        dimensions=["band", "skymap", "tract", "patch"],
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Only one source of deblended footprints is ever used per run;
        # prune the connections the configured dataset does not need so the
        # middleware does not require (or load) the unused inputs.
        if config.footprintDatasetName != "ScarletModelData":
            self.inputs.remove("scarletModels")
        if config.footprintDatasetName != "DeblendedFlux":
            self.inputs.remove("footprintCatInBand")
        # NOTE(review): removed leftover debug `print` of self.inputs here;
        # library code should not write to stdout.
class ForcedPhotCoaddConfig(pipeBase.PipelineTaskConfig,
                            pipelineConnections=ForcedPhotCoaddConnections):
    references = lsst.pex.config.ConfigurableField(
        target=MultiBandReferencesTask,
        doc="subtask to retrieve reference source catalog"
    )
    measurement = lsst.pex.config.ConfigurableField(
        target=ForcedMeasurementTask,
        doc="subtask to do forced measurement"
    )
    coaddName = lsst.pex.config.Field(
        doc="coadd name: typically one of deep or goodSeeing",
        dtype=str,
        default="deep",
    )
    doApCorr = lsst.pex.config.Field(
        dtype=bool,
        default=True,
        doc="Run subtask to apply aperture corrections"
    )
    applyApCorr = lsst.pex.config.ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    catalogCalculation = lsst.pex.config.ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    footprintDatasetName = lsst.pex.config.Field(
        doc="Dataset (without coadd prefix) that should be used to obtain (Heavy)Footprints for sources. "
            "Must have IDs that match those of the reference catalog. "
            "If None, Footprints will be generated by transforming the reference Footprints.",
        dtype=str,
        default="ScarletModelData",
        optional=True
    )
    doConserveFlux = lsst.pex.config.Field(
        dtype=bool,
        default=True,
        doc="Whether to use the deblender models as templates to re-distribute the flux "
            "from the 'exposure' (True), or to perform measurements on the deblender model footprints. "
            "If footprintDatasetName != 'ScarletModelData' then this field is ignored.")
    doStripFootprints = lsst.pex.config.Field(
        dtype=bool,
        default=True,
        doc="Whether to strip footprints from the output catalog before "
            "saving to disk. "
            "This is usually done when using scarlet models to save disk space.")
    hasFakes = lsst.pex.config.Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        # Docstring inherited.
        super().setDefaults()
        # Forced photometry measures at fixed positions; the catalogCalculation
        # plugins are run on the single-band measurements instead.
        self.catalogCalculation.plugins.names = []
        # Propagate reference identifiers so forced sources can be joined
        # back to the reference catalog.
        self.measurement.copyColumns["id"] = "id"
        self.measurement.copyColumns["parent"] = "parent"
        self.references.removePatchOverlaps = False
        self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'REJECTED', 'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'REJECTED', 'INEXACT_PSF']

    def validate(self):
        super().validate()
        # Deblended footprints share pixels between parents and children, so
        # noise replacement cannot tolerate parents being dropped while their
        # children are kept.
        if (self.measurement.doReplaceWithNoise and self.footprintDatasetName is not None
                and self.references.removePatchOverlaps):
            raise ValueError("Cannot use removePatchOverlaps=True with deblended footprints, as parent "
                             "sources may be rejected while their children are not.")
class ForcedPhotCoaddTask(pipeBase.PipelineTask, pipeBase.CmdLineTask):
    """A command-line driver for performing forced measurement on coadd images.

    Parameters
    ----------
    butler : `lsst.daf.persistence.butler.Butler`, optional
        A Butler which will be passed to the references subtask to allow it to
        load its schema from disk. Optional, but must be specified if
        ``refSchema`` is not; if both are specified, ``refSchema`` takes
        precedence.
    refSchema : `lsst.afw.table.Schema`, optional
        The schema of the reference catalog, passed to the constructor of the
        references subtask. Optional, but must be specified if ``butler`` is
        not; if both are specified, ``refSchema`` takes precedence.
    **kwds
        Keyword arguments are passed to the supertask constructor.
    """

    ConfigClass = ForcedPhotCoaddConfig
    RunnerClass = ForcedPhotCoaddRunner
    _DefaultName = "forcedPhotCoadd"
    dataPrefix = "deepCoadd_"

    def __init__(self, butler=None, refSchema=None, initInputs=None, **kwds):
        super().__init__(**kwds)

        if initInputs is not None:
            refSchema = initInputs['inputSchema'].schema

        self.makeSubtask("references", butler=butler, schema=refSchema)
        if refSchema is None:
            refSchema = self.references.schema
        self.makeSubtask("measurement", refSchema=refSchema)
        # The aperture-correction and catalogCalculation subtasks must see the
        # schema as extended by the measurement subtask.
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.measurement.schema)
        self.makeSubtask('catalogCalculation', schema=self.measurement.schema)
        self.outputSchema = lsst.afw.table.SourceCatalog(self.measurement.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        refCatInBand = inputs.pop('refCatInBand')
        # Pick the footprint source configured for this run; the connections
        # class removed the unused input, so only the matching key exists.
        if self.config.footprintDatasetName == "ScarletModelData":
            footprintData = inputs.pop("scarletModels")
        elif self.config.footprintDatasetName == "DeblendedFlux":
            # BUGFIX: was "footprintCatIndBand", which is not the connection
            # name and raised KeyError; the connection is footprintCatInBand.
            footprintData = inputs.pop("footprintCatInBand")
        else:
            footprintData = None

        inputs['measCat'], inputs['exposureId'] = self.generateMeasCat(inputRefs.exposure.dataId,
                                                                       inputs['exposure'],
                                                                       inputs['refCat'],
                                                                       refCatInBand,
                                                                       inputs['refWcs'],
                                                                       "tract_patch",
                                                                       footprintData)
        outputs = self.run(**inputs)
        # Strip HeavyFootprints to save space on disk.
        if self.config.footprintDatasetName == "ScarletModelData" and self.config.doStripFootprints:
            sources = outputs.measCat
            for source in sources[sources["parent"] != 0]:
                source.setFootprint(None)
        butlerQC.put(outputs, outputRefs)

    def generateMeasCat(self, exposureDataId, exposure, refCat, refCatInBand, refWcs, idPackerName,
                        footprintData):
        """Generate a measurement catalog for Gen3.

        Parameters
        ----------
        exposureDataId : `DataId`
            Butler dataId for this exposure.
        exposure : `lsst.afw.image.exposure.Exposure`
            Exposure to generate the catalog for.
        refCat : `lsst.afw.table.SourceCatalog`
            Catalog of shapes and positions at which to force photometry.
        refCatInBand : `lsst.afw.table.SourceCatalog`
            Catalog of shapes and position in the band forced photometry is
            currently being performed.
        refWcs : `lsst.afw.image.SkyWcs`
            Reference world coordinate system.
        idPackerName : `str`
            Type of ID packer to construct from the registry.
        footprintData : `ScarletModelData` or `lsst.afw.table.SourceCatalog`
            Either the scarlet data models or the deblended catalog
            containing footprints.
            If `footprintData` is `None` then the footprints contained
            in `refCatInBand` are used.

        Returns
        -------
        measCat : `lsst.afw.table.SourceCatalog`
            Catalog of forced sources to measure.
        expId : `int`
            Unique binary id associated with the input exposure.

        Raises
        ------
        LookupError
            Raised if a footprint with a given source id was in the reference
            catalog but not in the reference catalog in band (meaning there
            was some sort of mismatch in the two input catalogs).
        """
        exposureIdInfo = ExposureIdInfo.fromDataId(exposureDataId, idPackerName)
        idFactory = exposureIdInfo.makeSourceIdFactory()

        measCat = self.measurement.generateMeasCat(exposure, refCat, refWcs,
                                                   idFactory=idFactory)
        if self.config.footprintDatasetName == "ScarletModelData":
            # Attach footprints generated from the scarlet models.
            self._attachScarletFootprints(
                catalog=measCat,
                modelData=footprintData,
                exposure=exposure,
                band=exposureDataId["band"]
            )
        else:
            if self.config.footprintDatasetName is None:
                footprintCat = refCatInBand
            else:
                footprintCat = footprintData
            for srcRecord in measCat:
                fpRecord = footprintCat.find(srcRecord.getId())
                if fpRecord is None:
                    raise LookupError("Cannot find Footprint for source {}; please check that {} "
                                      "IDs are compatible with reference source IDs"
                                      .format(srcRecord.getId(), footprintCat))
                srcRecord.setFootprint(fpRecord.getFootprint())
        return measCat, exposureIdInfo.expId

    def runDataRef(self, dataRef, psfCache=None):
        """Perform forced measurement on a single exposure.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Passed to the ``references`` subtask to obtain the reference WCS,
            the ``getExposure`` method (implemented by derived classes) to
            read the measurement image, and the ``fetchReferences`` method to
            get the exposure and load the reference catalog (see
            the documentation of those methods for details of the datasets
            and data ID keys which are used).
        psfCache : `int`, optional
            Size of PSF cache, or `None`. The size of the PSF cache can have
            a significant effect upon the runtime for complicated PSF models.

        Notes
        -----
        Sources are generated with ``generateMeasCat`` in the ``measurement``
        subtask. These are passed to ``measurement``'s ``run`` method, which
        fills the source catalog with the forced measurement results. The
        sources are then passed to the ``writeOutputs`` method (implemented by
        derived classes) which writes the outputs.
        """
        refWcs = self.references.getWcs(dataRef)
        exposure = self.getExposure(dataRef)
        if psfCache is not None:
            exposure.getPsf().setCacheCapacity(psfCache)
        refCat = self.fetchReferences(dataRef, exposure)

        exposureId = coaddUtils.getGen3CoaddExposureId(dataRef, coaddName=self.config.coaddName,
                                                       includeBand=False, log=self.log)
        measCat = self.measurement.generateMeasCat(
            exposure, refCat, refWcs, idFactory=self.makeIdFactory(dataRef, exposureId=exposureId))
        self.log.info("Performing forced measurement on %s", dataRef.dataId)
        self.attachFootprints(measCat, refCat, exposure, refWcs, dataRef)

        forcedPhotResult = self.run(measCat, exposure, refCat, refWcs, exposureId=exposureId)

        self.writeOutput(dataRef, forcedPhotResult.measCat)

    def run(self, measCat, exposure, refCat, refWcs, exposureId=None):
        """Perform forced measurement on a single exposure.

        Parameters
        ----------
        measCat : `lsst.afw.table.SourceCatalog`
            The measurement catalog, based on the sources listed in the
            reference catalog.
        exposure : `lsst.afw.image.Exposure`
            The measurement image upon which to perform forced detection.
        refCat : `lsst.afw.table.SourceCatalog`
            The reference catalog of sources to measure.
        refWcs : `lsst.afw.image.SkyWcs`
            The WCS for the references.
        exposureId : `int`, optional
            Optional unique exposureId used for random seed in measurement
            task.

        Returns
        -------
        result : ~`lsst.pipe.base.Struct`
            Structure with fields:

            ``measCat``
                Catalog of forced measurement results
                (`lsst.afw.table.SourceCatalog`).
        """
        self.measurement.run(measCat, exposure, refCat, refWcs, exposureId=exposureId)
        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=measCat,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )
        self.catalogCalculation.run(measCat)

        return pipeBase.Struct(measCat=measCat)

    def makeIdFactory(self, dataRef, exposureId):
        """Create an object that generates globally unique source IDs.

        Source IDs are created based on a per-CCD ID and the ID of the CCD
        itself.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference. The "CoaddId_bits" and "CoaddId" datasets
            are accessed. The data ID must have tract and patch keys.
        exposureId : `int`
            Unique exposure identifier packed with the bit width read from
            the "CoaddId_bits" dataset.
        """
        exposureIdInfo = ExposureIdInfo(exposureId, dataRef.get(self.config.coaddName + "CoaddId_bits"))
        return exposureIdInfo.makeSourceIdFactory()

    def fetchReferences(self, dataRef, exposure):
        """Return an iterable of reference sources which overlap the exposure.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference corresponding to the image to be measured;
            should have tract, patch, and filter keys.
        exposure : `lsst.afw.image.Exposure`
            Unused.

        Notes
        -----
        All work is delegated to the references subtask; see
        `CoaddSrcReferencesTask` for information about the default behavior.
        """
        skyMap = dataRef.get(self.dataPrefix + "skyMap", immediate=True)
        tractInfo = skyMap[dataRef.dataId["tract"]]
        patch = tuple(int(v) for v in dataRef.dataId["patch"].split(","))
        patchInfo = tractInfo.getPatchInfo(patch)
        references = lsst.afw.table.SourceCatalog(self.references.schema)
        references.extend(self.references.fetchInPatches(dataRef, patchList=[patchInfo]))
        return references

    def attachFootprints(self, sources, refCat, exposure, refWcs, dataRef):
        r"""Attach Footprints to source records.

        For coadd forced photometry, we use the deblended "heavy"
        `~lsst.afw.detection.Footprint`\ s from the single-band measurements
        of the same band - because we've guaranteed that the peaks (and hence
        child sources) will be consistent across all bands before we get to
        measurement, this should yield reasonable deblending for most sources.
        It's most likely limitation is that it will not provide good flux
        upper limits for sources that were not detected in this band but were
        blended with sources that were.
        """
        if self.config.footprintDatasetName is None:
            return self.measurement.attachTransformedFootprints(sources, refCat, exposure, refWcs)

        self.log.info("Loading deblended footprints for sources from %s, %s",
                      self.config.footprintDatasetName, dataRef.dataId)

        if self.config.footprintDatasetName == "ScarletModelData":
            # Load the scarlet models and convert them to footprints.
            dataModel = dataRef.get("%sCoadd_%s" % (self.config.coaddName, self.config.footprintDatasetName),
                                    immediate=True)
            self._attachScarletFootprints(refCat, dataModel, exposure, dataRef.dataId["band"])
        else:
            fpCat = dataRef.get("%sCoadd_%s" % (self.config.coaddName, self.config.footprintDatasetName),
                                immediate=True)
            for refRecord, srcRecord in zip(refCat, sources):
                fpRecord = fpCat.find(refRecord.getId())
                if fpRecord is None:
                    raise LookupError("Cannot find Footprint for source %s; please check that %sCoadd_%s "
                                      "IDs are compatible with reference source IDs" %
                                      (srcRecord.getId(), self.config.coaddName,
                                       self.config.footprintDatasetName))
                srcRecord.setFootprint(fpRecord.getFootprint())

    def _attachScarletFootprints(self, catalog, modelData, exposure, band):
        """Attach scarlet models as HeavyFootprints."""
        if self.config.doConserveFlux:
            redistributeImage = exposure.image
        else:
            redistributeImage = None
        # Attach the footprints in this band; scarlet data is freed as the
        # footprints are built (removeScarletData=True) to limit memory use.
        modelData.updateCatalogFootprints(
            catalog=catalog,
            band=band,
            psfModel=exposure.getPsf(),
            redistributeImage=redistributeImage,
            removeScarletData=True,
            updateFluxColumns=False,
        )

    def getExposure(self, dataRef):
        """Read input exposure on which measurement will be performed.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference.
        """
        if self.config.hasFakes:
            name = "fakes_" + self.config.coaddName + "Coadd_calexp"
        else:
            name = self.config.coaddName + "Coadd_calexp"
        # Best-effort: return None rather than raising when the coadd for
        # this data ID does not exist.
        return dataRef.get(name) if dataRef.datasetExists(name) else None

    def writeOutput(self, dataRef, sources):
        """Write forced source table.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference. The forced_src dataset (with
            self.dataPrefix prepended) is all that will be modified.
        sources : `lsst.afw.table.SourceCatalog`
            Catalog of sources to save.
        """
        dataRef.put(sources, self.dataPrefix + "forced_src", flags=lsst.afw.table.SOURCE_IO_NO_FOOTPRINTS)

    def getSchemaCatalogs(self):
        """The schema catalogs that will be used by this task.

        Returns
        -------
        schemaCatalogs : `dict`
            Dictionary mapping dataset type to schema catalog.

        Notes
        -----
        There is only one schema for each type of forced measurement. The
        dataset type for this measurement is defined in the mapper.
        """
        catalog = lsst.afw.table.SourceCatalog(self.measurement.schema)
        catalog.getTable().setMetadata(self.measurement.algMetadata)
        datasetType = self.dataPrefix + "forced_src"
        return {datasetType: catalog}

    def _getConfigName(self):
        # Docstring inherited from CmdLineTask.
        return self.dataPrefix + "forced_config"

    def _getMetadataName(self):
        # Docstring inherited from CmdLineTask.
        return self.dataPrefix + "forced_metadata"

    @classmethod
    def _makeArgumentParser(cls):
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_forced_src", help="data ID, with raw CCD keys + tract",
                               ContainerClass=coaddUtils.CoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser