import lsst.pex.config
import lsst.afw.table

import lsst.pipe.base as pipeBase
import lsst.coadd.utils as coaddUtils
from lsst.obs.base import ExposureIdInfo

from .references import MultiBandReferencesTask
from .forcedMeasurement import ForcedMeasurementTask
from .applyApCorr import ApplyApCorrTask
from .catalogCalculation import CatalogCalculationTask

__all__ = ("ForcedPhotCoaddConfig", "ForcedPhotCoaddTask")
38 """Get the psfCache setting into ForcedPhotCoaddTask"""
41 return pipeBase.ButlerInitializedTaskRunner.getTargetList(parsedCmd,
42 psfCache=parsedCmd.psfCache)


class ForcedPhotCoaddConnections(pipeBase.PipelineTaskConnections,
                                 dimensions=("band", "skymap", "tract", "patch"),
                                 defaultTemplates={"inputCoaddName": "deep",
                                                   "outputCoaddName": "deep"}):
    inputSchema = pipeBase.connectionTypes.InitInput(
        doc="Schema for the input measurement catalogs.",
        name="{inputCoaddName}Coadd_ref_schema",
        storageClass="SourceCatalog",
    )
    outputSchema = pipeBase.connectionTypes.InitOutput(
        doc="Schema for the output forced measurement catalogs.",
        name="{outputCoaddName}Coadd_forced_src_schema",
        storageClass="SourceCatalog",
    )
    exposure = pipeBase.connectionTypes.Input(
        doc="Input exposure to perform photometry on.",
        name="{inputCoaddName}Coadd_calexp",
        storageClass="ExposureF",
        dimensions=["band", "skymap", "tract", "patch"],
    )
    refCat = pipeBase.connectionTypes.Input(
        doc="Catalog of shapes and positions at which to force photometry.",
        name="{inputCoaddName}Coadd_ref",
        storageClass="SourceCatalog",
        dimensions=["skymap", "tract", "patch"],
    )
    refCatInBand = pipeBase.connectionTypes.Input(
        doc="Catalog of shapes and positions in the band having forced photometry done",
        name="{inputCoaddName}Coadd_meas",
        storageClass="SourceCatalog",
        dimensions=("band", "skymap", "tract", "patch"),
    )
    refWcs = pipeBase.connectionTypes.Input(
        doc="Reference world coordinate system.",
        name="{inputCoaddName}Coadd.wcs",
        storageClass="Wcs",
        dimensions=["band", "skymap", "tract", "patch"],
    )
    measCat = pipeBase.connectionTypes.Output(
        doc="Output forced photometry catalog.",
        name="{outputCoaddName}Coadd_forced_src",
        storageClass="SourceCatalog",
        dimensions=["band", "skymap", "tract", "patch"],
    )


class ForcedPhotCoaddConfig(pipeBase.PipelineTaskConfig,
                            pipelineConnections=ForcedPhotCoaddConnections):
    references = lsst.pex.config.ConfigurableField(
        target=MultiBandReferencesTask,
        doc="subtask to retrieve reference source catalog"
    )
    measurement = lsst.pex.config.ConfigurableField(
        target=ForcedMeasurementTask,
        doc="subtask to do forced measurement"
    )
    coaddName = lsst.pex.config.Field(
        doc="coadd name: typically one of deep or goodSeeing",
        dtype=str,
        default="deep",
    )
    doApCorr = lsst.pex.config.Field(
        dtype=bool,
        default=True,
        doc="Run subtask to apply aperture corrections"
    )
    applyApCorr = lsst.pex.config.ConfigurableField(
        target=ApplyApCorrTask,
        doc="Subtask to apply aperture corrections"
    )
    catalogCalculation = lsst.pex.config.ConfigurableField(
        target=CatalogCalculationTask,
        doc="Subtask to run catalogCalculation plugins on catalog"
    )
    footprintDatasetName = lsst.pex.config.Field(
        doc="Dataset (without coadd prefix) that should be used to obtain (Heavy)Footprints for sources. "
            "Must have IDs that match those of the reference catalog. "
            "If None, Footprints will be generated by transforming the reference Footprints.",
        dtype=str,
        default="meas",
        optional=True
    )
    hasFakes = lsst.pex.config.Field(
        dtype=bool,
        default=False,
        doc="Should be set to True if fake sources have been inserted into the input data."
    )

    def setDefaults(self):
        super().setDefaults()

        self.catalogCalculation.plugins.names = []
        self.measurement.copyColumns["id"] = "id"
        self.measurement.copyColumns["parent"] = "parent"
        self.references.removePatchOverlaps = False
        self.measurement.plugins.names |= ['base_InputCount', 'base_Variance']
        self.measurement.plugins['base_PixelFlags'].masksFpAnywhere = ['CLIPPED', 'SENSOR_EDGE',
                                                                       'REJECTED', 'INEXACT_PSF']
        self.measurement.plugins['base_PixelFlags'].masksFpCenter = ['CLIPPED', 'SENSOR_EDGE',
                                                                     'REJECTED', 'INEXACT_PSF']

    def validate(self):
        super().validate()
        if (self.measurement.doReplaceWithNoise and self.footprintDatasetName is not None
                and self.references.removePatchOverlaps):
            raise ValueError("Cannot use removePatchOverlaps=True with deblended footprints, as parent "
                             "sources may be rejected while their children are not.")


class ForcedPhotCoaddTask(pipeBase.PipelineTask, pipeBase.CmdLineTask):
    """A command-line driver for performing forced measurement on coadd images.

    Parameters
    ----------
    butler : `lsst.daf.persistence.butler.Butler`, optional
        A Butler which will be passed to the references subtask to allow it to
        load its schema from disk. Optional, but must be specified if
        ``refSchema`` is not; if both are specified, ``refSchema`` takes
        precedence.
    refSchema : `lsst.afw.table.Schema`, optional
        The schema of the reference catalog, passed to the constructor of the
        references subtask. Optional, but must be specified if ``butler`` is
        not; if both are specified, ``refSchema`` takes precedence.
    **kwds
        Keyword arguments are passed to the supertask constructor.
    """

    ConfigClass = ForcedPhotCoaddConfig
    RunnerClass = ForcedPhotCoaddRunner
    _DefaultName = "forcedPhotCoadd"
    dataPrefix = "deepCoadd_"

    def __init__(self, butler=None, refSchema=None, initInputs=None, **kwds):
        super().__init__(**kwds)

        if initInputs is not None:
            refSchema = initInputs['inputSchema'].schema

        self.makeSubtask("references", butler=butler, schema=refSchema)
        if refSchema is None:
            refSchema = self.references.schema
        self.makeSubtask("measurement", refSchema=refSchema)
        if self.config.doApCorr:
            self.makeSubtask("applyApCorr", schema=self.measurement.schema)
        self.makeSubtask('catalogCalculation', schema=self.measurement.schema)
        self.outputSchema = lsst.afw.table.SourceCatalog(self.measurement.schema)
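        # Schema flow: the reference schema (from initInputs or the references
        # subtask) seeds the forced-measurement schema, which is then shared with
        # the applyApCorr and catalogCalculation subtasks and exported via
        # outputSchema.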

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)

        refCatInBand = inputs.pop('refCatInBand')
        inputs['measCat'], inputs['exposureId'] = self.generateMeasCat(inputRefs.exposure.dataId,
                                                                       inputs['exposure'],
                                                                       inputs['refCat'],
                                                                       refCatInBand,
                                                                       inputs['refWcs'],
                                                                       "tract_patch")
        outputs = self.run(**inputs)
        butlerQC.put(outputs, outputRefs)
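
    # Note: "tract_patch" above names the data ID packer handed to
    # ExposureIdInfo.fromDataId in generateMeasCat; the resulting exposure ID for a
    # coadd quantum is therefore packed from the tract and patch values.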

    def generateMeasCat(self, exposureDataId, exposure, refCat, refCatInBand, refWcs, idPackerName):
        """Generate a measurement catalog for Gen3.

        Parameters
        ----------
        exposureDataId : `DataId`
            Butler dataId for this exposure.
        exposure : `lsst.afw.image.exposure.Exposure`
            Exposure to generate the catalog for.
        refCat : `lsst.afw.table.SourceCatalog`
            Catalog of shapes and positions at which to force photometry.
        refCatInBand : `lsst.afw.table.SourceCatalog`
            Catalog of shapes and positions in the band in which forced
            photometry is currently being performed.
        refWcs : `lsst.afw.image.SkyWcs`
            Reference world coordinate system.
        idPackerName : `str`
            Type of ID packer to construct from the registry.

        Returns
        -------
        measCat : `lsst.afw.table.SourceCatalog`
            Catalog of forced sources to measure.
        expId : `int`
            Unique binary ID associated with the input exposure.

        Raises
        ------
        LookupError
            Raised if a footprint with a given source id was in the reference
            catalog but not in the reference catalog in band (meaning there
            was some sort of mismatch in the two input catalogs).
        """
        exposureIdInfo = ExposureIdInfo.fromDataId(exposureDataId, idPackerName)
        idFactory = exposureIdInfo.makeSourceIdFactory()

        measCat = self.measurement.generateMeasCat(exposure, refCat, refWcs,
                                                   idFactory=idFactory)
        # Attach the per-band deblended footprints to the new measurement records.
        for srcRecord in measCat:
            fpRecord = refCatInBand.find(srcRecord.getId())
            if fpRecord is None:
                raise LookupError("Cannot find Footprint for source {}; please check that {} "
                                  "IDs are compatible with reference source IDs"
                                  .format(srcRecord.getId(), self.config.connections.refCatInBand))
            srcRecord.setFootprint(fpRecord.getFootprint())
        return measCat, exposureIdInfo.expId
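
    # The packed expId returned here is what run() later passes to the measurement
    # subtask as its random-number seed (see the exposureId argument below).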

    def runDataRef(self, dataRef, psfCache=None):
        """Perform forced measurement on a single exposure.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Passed to the ``references`` subtask to obtain the reference WCS,
            the ``getExposure`` method (implemented by derived classes) to
            read the measurement image, and the ``fetchReferences`` method to
            get the exposure and load the reference catalog (see
            `CoaddSrcReferencesTask`). Refer to derived class documentation
            for details of the datasets and data ID keys which are used.
        psfCache : `int`, optional
            Size of PSF cache, or `None`. The size of the PSF cache can have
            a significant effect upon the runtime for complicated PSF models.

        Notes
        -----
        Sources are generated with ``generateMeasCat`` in the ``measurement``
        subtask. These are passed to ``measurement``'s ``run`` method, which
        fills the source catalog with the forced measurement results. The
        sources are then passed to the ``writeOutput`` method (implemented by
        derived classes) which writes the outputs.
        """
        refWcs = self.references.getWcs(dataRef)
        exposure = self.getExposure(dataRef)
        if psfCache is not None:
            exposure.getPsf().setCacheCapacity(psfCache)
        refCat = self.fetchReferences(dataRef, exposure)

        exposureId = coaddUtils.getGen3CoaddExposureId(dataRef, coaddName=self.config.coaddName,
                                                       includeBand=False, log=self.log)
        measCat = self.measurement.generateMeasCat(
            exposure, refCat, refWcs, idFactory=self.makeIdFactory(dataRef, exposureId=exposureId))
        self.log.info("Performing forced measurement on %s", dataRef.dataId)
        self.attachFootprints(measCat, refCat, exposure, refWcs, dataRef)

        forcedPhotResult = self.run(measCat, exposure, refCat, refWcs, exposureId=exposureId)

        self.writeOutput(dataRef, forcedPhotResult.measCat)
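
    # runDataRef is the Gen2 (CmdLineTask) entry point; runQuantum above is the
    # Gen3 (PipelineTask) equivalent. Both delegate the actual measurement to the
    # shared run() method below.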

    def run(self, measCat, exposure, refCat, refWcs, exposureId=None):
        """Perform forced measurement on a single exposure.

        Parameters
        ----------
        measCat : `lsst.afw.table.SourceCatalog`
            The measurement catalog, based on the sources listed in the
            reference catalog.
        exposure : `lsst.afw.image.Exposure`
            The measurement image upon which to perform forced detection.
        refCat : `lsst.afw.table.SourceCatalog`
            The reference catalog of sources to measure.
        refWcs : `lsst.afw.image.SkyWcs`
            The WCS for the references.
        exposureId : `int`, optional
            Optional unique exposureId used for random seed in measurement
            task.

        Returns
        -------
        result : `~lsst.pipe.base.Struct`
            Structure with fields:

            ``measCat``
                Catalog of forced measurement results
                (`lsst.afw.table.SourceCatalog`).
        """
        self.measurement.run(measCat, exposure, refCat, refWcs, exposureId=exposureId)
        if self.config.doApCorr:
            self.applyApCorr.run(
                catalog=measCat,
                apCorrMap=exposure.getInfo().getApCorrMap()
            )
        self.catalogCalculation.run(measCat)

        return pipeBase.Struct(measCat=measCat)
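
    # A minimal usage sketch, assuming a configured `task` and pre-built `measCat`,
    # `exposure`, `refCat`, and `refWcs` objects (names are illustrative):
    #
    #     result = task.run(measCat, exposure, refCat, refWcs, exposureId=12345)
    #     forcedSources = result.measCat  # lsst.afw.table.SourceCatalog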

    def makeIdFactory(self, dataRef, exposureId):
        """Create an object that generates globally unique source IDs.

        Source IDs are created based on a per-CCD ID and the ID of the CCD
        itself.

        Parameters
        ----------
        dataRef : `lsst.daf.persistence.ButlerDataRef`
            Butler data reference. The "CoaddId_bits" and "CoaddId" datasets
            are accessed. The data ID must have tract and patch keys.
        """
        exposureIdInfo = ExposureIdInfo(exposureId, dataRef.get(self.config.coaddName + "CoaddId_bits"))
        return exposureIdInfo.makeSourceIdFactory()
361 """Return an iterable of reference sources which overlap the exposure.
365 dataRef : `lsst.daf.persistence.ButlerDataRef`
366 Butler data reference corresponding to the image to be measured;
367 should have tract, patch, and filter keys.
374 All work
is delegated to the references subtask; see
375 `CoaddSrcReferencesTask`
for information about the default behavior.
377 skyMap = dataRef.get(self.dataPrefix + "skyMap", immediate=
True)
378 tractInfo = skyMap[dataRef.dataId[
"tract"]]
379 patch = tuple(int(v)
for v
in dataRef.dataId[
"patch"].split(
","))
380 patchInfo = tractInfo.getPatchInfo(patch)
382 references.extend(self.references.fetchInPatches(dataRef, patchList=[patchInfo]))
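
    # Gen2 patch data IDs are comma-separated strings (e.g. "5,4"); fetchReferences
    # converts them to the (x, y) index tuple expected by TractInfo.getPatchInfo.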
386 r"""Attach Footprints to source records.
388 For coadd forced photometry, we use the deblended "heavy"
390 of the same band - because we
've guaranteed that the peaks (and hence
391 child sources) will be consistent across all bands before we get to
392 measurement, this should yield reasonable deblending
for most sources.
393 It
's most likely limitation is that it will not provide good flux
394 upper limits for sources that were
not detected
in this band but were
395 blended
with sources that were.
397 if self.config.footprintDatasetName
is None:
398 return self.measurement.attachTransformedFootprints(sources, refCat, exposure, refWcs)
400 self.log.info(
"Loading deblended footprints for sources from %s, %s",
401 self.config.footprintDatasetName, dataRef.dataId)
402 fpCat = dataRef.get(
"%sCoadd_%s" % (self.config.coaddName, self.config.footprintDatasetName),
404 for refRecord, srcRecord
in zip(refCat, sources):
405 fpRecord = fpCat.find(refRecord.getId())
407 raise LookupError(
"Cannot find Footprint for source %s; please check that %sCoadd_%s "
408 "IDs are compatible with reference source IDs" %
409 (srcRecord.getId(), self.config.coaddName,
410 self.config.footprintDatasetName))
411 srcRecord.setFootprint(fpRecord.getFootprint())
414 """Read input exposure on which measurement will be performed.
418 dataRef : `lsst.daf.persistence.ButlerDataRef`
419 Butler data reference.
421 if self.config.hasFakes:
422 name =
"fakes_" + self.config.coaddName +
"Coadd_calexp"
424 name = self.config.coaddName +
"Coadd_calexp"
426 return dataRef.get(name)
if dataRef.datasetExists(name)
else None
429 """Write forced source table
433 dataRef : `lsst.daf.persistence.ButlerDataRef`
434 Butler data reference. The forced_src dataset (with
435 self.dataPrefix prepended)
is all that will be modified.
437 Catalog of sources to save.
439 dataRef.put(sources, self.dataPrefix + "forced_src", flags=lsst.afw.table.SOURCE_IO_NO_FOOTPRINTS)
442 """The schema catalogs that will be used by this task.
446 schemaCatalogs : `dict`
447 Dictionary mapping dataset type to schema catalog.
451 There is only one schema
for each type of forced measurement. The
452 dataset type
for this measurement
is defined
in the mapper.
455 catalog.getTable().setMetadata(self.measurement.algMetadata)
456 datasetType = self.dataPrefix + "forced_src"
457 return {datasetType: catalog}

    def _getConfigName(self):
        return self.dataPrefix + "forced_config"

    def _getMetadataName(self):
        return self.dataPrefix + "forced_metadata"

    @classmethod
    def _makeArgumentParser(cls):
        parser = pipeBase.ArgumentParser(name=cls._DefaultName)
        parser.add_id_argument("--id", "deepCoadd_forced_src", help="data ID, with raw CCD keys + tract",
                               ContainerClass=coaddUtils.CoaddDataIdContainer)
        parser.add_argument("--psfCache", type=int, default=100, help="Size of CoaddPsf cache")
        return parser