lsst.pipe.tasks gfb5511b3f7+6fc9c088ec
Classes
    class PostprocessAnalysis
    class TransformCatalogBaseConfig
    class TransformCatalogBaseConnections
    class TransformCatalogBaseTask
    class TransformObjectCatalogConnections
    class WriteObjectTableConnections

Functions
    flattenFilters(df, noDupCols=['coord_ra', 'coord_dec'], camelCase=False, inputBands=None)
Variables
    log = logging.getLogger(__name__)
    catalogs : `dict`
    tract : `int`
    patch : `str`
    catalog : `pandas.DataFrame`
    ccdVisitId : `int`
    result : `~lsst.pipe.base.Struct`
    inputRefs : `~lsst.pipe.base.InputQuantizedConnection`, for dataIds of
    skyMap : `~lsst.skymap.BaseSkyMap`
    exposure : `lsst.afw.image.exposure.Exposure`
    externalSkyWcsGlobalCatalog : `~lsst.afw.table.ExposureCatalog`, optional
    externalSkyWcsTractCatalog : `~lsst.afw.table.ExposureCatalog`, optional
    externalPhotoCalibGlobalCatalog : `~lsst.afw.table.ExposureCatalog`, optional
    externalPhotoCalibTractCatalog : `~lsst.afw.table.ExposureCatalog`, optional
    visitSummary : `lsst.afw.table.ExposureCatalog`, optional
    tracts : `list` [`int`]
    bbox : `~lsst.geom.Box2I`
    wcs : `~lsst.afw.geom.SkyWcs`
    index : `int`
    detectorId : `int`
    externalSkyWcsCatalog : `lsst.afw.table.ExposureCatalog`, optional
    doApplyExternalSkyWcs
    externalPhotoCalibCatalog : `lsst.afw.table.ExposureCatalog`, optional
    doApplyExternalPhotoCalib
    names
    pluginsNotToCopy = tuple(measureConfig.plugins.names)
    aliasMap = catalog.schema.getAliasMap()
    mapper = afwTable.SchemaMapper(catalog.schema)
    schema = mapper.getOutputSchema()
    measurement = SingleFrameMeasurementTask(config=measureConfig, schema=schema)
    newCat = afwTable.SourceCatalog(schema)
    wcsPlugin = measurement.plugins["base_LocalWcs"]
    pcPlugin = measurement.plugins["base_LocalPhotoCalib"]
    visit : `int`
    dataRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`
    visitSummaryRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`
    visitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
lsst.pipe.tasks.postprocess.flattenFilters(df, noDupCols=['coord_ra', 'coord_dec'], camelCase=False, inputBands=None)
Flattens a dataframe with multilevel column index.
Definition at line 64 of file postprocess.py.
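As an illustration, a minimal sketch of calling it on the kind of (band, column) frame assembled by the transform tasks below; the column names, values, and the exact flattened naming are illustrative assumptions, not documented outputs.

import numpy as np
import pandas as pd

from lsst.pipe.tasks.postprocess import flattenFilters

# Hypothetical two-band frame with a (band, column) MultiIndex.
columns = pd.MultiIndex.from_tuples(
    [('g', 'psfFlux'), ('g', 'coord_ra'), ('r', 'psfFlux'), ('r', 'coord_ra')],
    names=('band', 'column'))
df = pd.DataFrame(np.ones((2, 4)), columns=columns)

# Collapse to one column level; noDupCols keeps a single copy of columns
# that would otherwise repeat per band (e.g. coord_ra).
flat = flattenFilters(df, noDupCols=['coord_ra'])
print(flat.columns.tolist())  # band-prefixed names such as 'g_psfFlux'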
lsst.pipe.tasks.postprocess.aliasMap = catalog.schema.getAliasMap()
Definition at line 726 of file postprocess.py.
lsst.pipe.tasks.postprocess.bbox : `~lsst.geom.Box2I`
Definition at line 569 of file postprocess.py.
lsst.pipe.tasks.postprocess.catalog : `pandas.DataFrame`
dfs = []
for filt, tableDict in catalogs.items():
for dataset, table in tableDict.items():
# Convert afwTable to pandas DataFrame
df = table.asAstropy().to_pandas().set_index('id', drop=True)
# Sort columns by name, to ensure matching schema among patches
df = df.reindex(sorted(df.columns), axis=1)
df = df.assign(tractId=tract, patchId=patch)
# Make columns a 3-level MultiIndex
df.columns = pd.MultiIndex.from_tuples([(dataset, filt, c) for c in df.columns],
names=('dataset', 'band', 'column'))
dfs.append(df)
# We do this dance and not `pd.concat(dfs)` because the pandas
# concatenation uses infinite memory.
catalog = functools.reduce(lambda d1, d2: d1.join(d2), dfs)
return catalog
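The reduce-over-join pattern above can be reproduced in isolation; a minimal sketch with placeholder frames sharing an index:

import functools

import pandas as pd

# Three small frames standing in for the per-band, per-dataset tables.
dfs = [pd.DataFrame({f'col{i}': [1.0, 2.0]}, index=[10, 11]) for i in range(3)]

# Pairwise joins on the shared index; each step materializes only the
# running result plus one input, instead of all inputs at once.
merged = functools.reduce(lambda d1, d2: d1.join(d2), dfs)
print(merged.columns.tolist())  # ['col0', 'col1', 'col2']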
class WriteSourceTableConnections(pipeBase.PipelineTaskConnections,
defaultTemplates={"catalogType": ""},
dimensions=("instrument", "visit", "detector")):
catalog = connectionTypes.Input(
doc="Input full-depth catalog of sources produced by CalibrateTask",
name="{catalogType}src",
storageClass="SourceCatalog",
dimensions=("instrument", "visit", "detector")
)
outputCatalog = connectionTypes.Output(
doc="Catalog of sources, `src` in DataFrame/Parquet format. The 'id' column is "
"replaced with an index; all other columns are unchanged.",
name="{catalogType}source",
storageClass="DataFrame",
dimensions=("instrument", "visit", "detector")
)
class WriteSourceTableConfig(pipeBase.PipelineTaskConfig,
pipelineConnections=WriteSourceTableConnections):
idGenerator = DetectorVisitIdGeneratorConfig.make_field()
class WriteSourceTableTask(pipeBase.PipelineTask):
_DefaultName = "writeSourceTable"
ConfigClass = WriteSourceTableConfig
def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
inputs['ccdVisitId'] = self.config.idGenerator.apply(butlerQC.quantum.dataId).catalog_id
result = self.run(**inputs)
outputs = pipeBase.Struct(outputCatalog=result.table)
butlerQC.put(outputs, outputRefs)
def run(self, catalog, ccdVisitId=None, **kwargs):
Definition at line 178 of file postprocess.py.
lsst.pipe.tasks.postprocess.catalogs : `dict`
_DefaultName = "writeObjectTable"
ConfigClass = WriteObjectTableConfig
# Names of table datasets to be merged
inputDatasets = ('forced_src', 'meas', 'ref')
# Tag of output dataset written by `MergeSourcesTask.write`
outputDataset = 'obj'
def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
measDict = {ref.dataId['band']: {'meas': cat} for ref, cat in
zip(inputRefs.inputCatalogMeas, inputs['inputCatalogMeas'])}
forcedSourceDict = {ref.dataId['band']: {'forced_src': cat} for ref, cat in
zip(inputRefs.inputCatalogForcedSrc, inputs['inputCatalogForcedSrc'])}
catalogs = {}
for band in measDict.keys():
catalogs[band] = {'meas': measDict[band]['meas'],
'forced_src': forcedSourceDict[band]['forced_src'],
'ref': inputs['inputCatalogRef']}
dataId = butlerQC.quantum.dataId
df = self.run(catalogs=catalogs, tract=dataId['tract'], patch=dataId['patch'])
outputs = pipeBase.Struct(outputCatalog=df)
butlerQC.put(outputs, outputRefs)
def run(self, catalogs, tract, patch):
Definition at line 169 of file postprocess.py.
lsst.pipe.tasks.postprocess.ccdVisitId : `int`
Definition at line 246 of file postprocess.py.
lsst.pipe.tasks.postprocess.dataRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`
Definition at line 1421 of file postprocess.py.
lsst.pipe.tasks.postprocess.detectorId : `int`
Definition at line 605 of file postprocess.py.
lsst.pipe.tasks.postprocess.doApplyExternalPhotoCalib
Definition at line 614 of file postprocess.py.
lsst.pipe.tasks.postprocess.doApplyExternalSkyWcs
Definition at line 609 of file postprocess.py.
lsst.pipe.tasks.postprocess.exposure : `lsst.afw.image.exposure.Exposure`
if len(tracts) == 1:
return 0
center = wcs.pixelToSky(bbox.getCenter())
sep = []
for tractId in tracts:
tract = skyMap[tractId]
tractCenter = tract.getWcs().pixelToSky(tract.getBBox().getCenter())
sep.append(center.separation(tractCenter))
return np.argmin(sep)
def prepareCalibratedExposure(
self,
exposure,
detectorId,
externalSkyWcsCatalog=None,
externalPhotoCalibCatalog=None,
visitSummary=None,
):
Definition at line 485 of file postprocess.py.
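getClosestTract (its body is shown above under `exposure`) selects the tract whose center lies nearest the detector center on the sky. The same selection, sketched with astropy and placeholder coordinates:

import numpy as np
import astropy.units as u
from astropy.coordinates import SkyCoord

# Placeholder detector center and tract centers; the task derives these
# from the exposure bbox/WCS and the skyMap.
center = SkyCoord(ra=150.0 * u.deg, dec=2.0 * u.deg)
tractCenters = SkyCoord(ra=[149.0, 150.1, 152.0] * u.deg,
                        dec=[2.0, 2.05, 1.0] * u.deg)

# Index of the closest tract center, mirroring np.argmin over separations.
ind = int(np.argmin(center.separation(tractCenters)))
print(ind)  # 1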
lsst.pipe.tasks.postprocess.externalPhotoCalibCatalog : `lsst.afw.table.ExposureCatalog`, optional
Definition at line 612 of file postprocess.py.
lsst.pipe.tasks.postprocess.externalPhotoCalibGlobalCatalog : `~lsst.afw.table.ExposureCatalog`, optional
Definition at line 491 of file postprocess.py.
lsst.pipe.tasks.postprocess.externalPhotoCalibTractCatalog : `~lsst.afw.table.ExposureCatalog`, optional
Definition at line 493 of file postprocess.py.
lsst.pipe.tasks.postprocess.externalSkyWcsCatalog : `lsst.afw.table.ExposureCatalog`, optional
Definition at line 607 of file postprocess.py.
lsst.pipe.tasks.postprocess.externalSkyWcsGlobalCatalog : `~lsst.afw.table.ExposureCatalog`, optional
Definition at line 487 of file postprocess.py.
lsst.pipe.tasks.postprocess.externalSkyWcsTractCatalog : `~lsst.afw.table.ExposureCatalog`, optional
Definition at line 489 of file postprocess.py.
lsst.pipe.tasks.postprocess.index : `int`
Definition at line 576 of file postprocess.py.
lsst.pipe.tasks.postprocess.inputRefs : `~lsst.pipe.base.InputQuantizedConnection`, for dataIds of
self.log.info("Generating DataFrame from src catalog ccdVisitId=%s", ccdVisitId)
df = catalog.asAstropy().to_pandas().set_index('id', drop=True)
df['ccdVisitId'] = ccdVisitId
return pipeBase.Struct(table=df)
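The afw-to-pandas conversion in run() can be exercised on a tiny in-memory catalog; a minimal sketch in which the schema and the id are placeholders:

import lsst.afw.table as afwTable

# Build a one-record catalog on the minimal source schema.
schema = afwTable.SourceTable.makeMinimalSchema()
catalog = afwTable.SourceCatalog(schema)
record = catalog.addNew()
record.setId(42)

# The same conversion the task performs: afw table -> astropy -> pandas,
# with the source id promoted to the index.
df = catalog.asAstropy().to_pandas().set_index('id', drop=True)
print(df.index[0])  # 42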
class WriteRecalibratedSourceTableConnections(WriteSourceTableConnections,
defaultTemplates={"catalogType": "",
"skyWcsName": "gbdesAstrometricFit",
"photoCalibName": "fgcm"},
# TODO: remove on DM-39854.
deprecatedTemplates={
"skyWcsName": "Deprecated; will be removed after v26.",
"photoCalibName": "Deprecated; will be removed after v26."
},
dimensions=("instrument", "visit", "detector", "skymap")):
skyMap = connectionTypes.Input(
doc="skyMap needed to choose which tract-level calibrations to use when multiple available",
name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
storageClass="SkyMap",
dimensions=("skymap",),
# TODO: remove on DM-39854.
deprecated=(
"Deprecated, since 'visitSummary' already resolves calibrations across tracts. "
"Will be removed after v26."
),
)
exposure = connectionTypes.Input(
doc="Input exposure to perform photometry on.",
name="calexp",
storageClass="ExposureF",
dimensions=["instrument", "visit", "detector"],
# TODO: remove on DM-39584
deprecated=(
"Deprecated, as the `calexp` is not needed and just creates unnecessary i/o. "
"Will be removed after v26."
),
)
visitSummary = connectionTypes.Input(
doc="Input visit-summary catalog with updated calibration objects.",
name="finalVisitSummary",
storageClass="ExposureCatalog",
dimensions=("instrument", "visit",),
)
externalSkyWcsTractCatalog = connectionTypes.Input(
doc=("Per-tract, per-visit wcs calibrations. These catalogs use the detector "
"id for the catalog id, sorted on id for fast lookup."),
name="{skyWcsName}SkyWcsCatalog",
storageClass="ExposureCatalog",
dimensions=["instrument", "visit", "tract"],
multiple=True,
# TODO: remove on DM-39854.
deprecated="Deprecated in favor of 'visitSummary'. Will be removed after v26.",
)
externalSkyWcsGlobalCatalog = connectionTypes.Input(
doc=("Per-visit wcs calibrations computed globally (with no tract information). "
"These catalogs use the detector id for the catalog id, sorted on id for "
"fast lookup."),
name="finalVisitSummary",
storageClass="ExposureCatalog",
dimensions=["instrument", "visit"],
# TODO: remove on DM-39854.
deprecated="Deprecated in favor of 'visitSummary'. Will be removed after v26.",
)
externalPhotoCalibTractCatalog = connectionTypes.Input(
doc=("Per-tract, per-visit photometric calibrations. These catalogs use the "
"detector id for the catalog id, sorted on id for fast lookup."),
name="{photoCalibName}PhotoCalibCatalog",
storageClass="ExposureCatalog",
dimensions=["instrument", "visit", "tract"],
multiple=True,
# TODO: remove on DM-39854.
deprecated="Deprecated in favor of 'visitSummary'. Will be removed after v26.",
)
externalPhotoCalibGlobalCatalog = connectionTypes.Input(
doc=("Per-visit photometric calibrations computed globally (with no tract "
"information). These catalogs use the detector id for the catalog id, "
"sorted on id for fast lookup."),
name="finalVisitSummary",
storageClass="ExposureCatalog",
dimensions=["instrument", "visit"],
# TODO: remove on DM-39854.
deprecated="Deprecated in favor of 'visitSummary'. Will be removed after v26.",
)
def __init__(self, *, config=None):
super().__init__(config=config)
# Same connection boilerplate as all other applications of
# Global/Tract calibrations
# TODO: remove all of this on DM-39854.
keepSkyMap = False
keepExposure = False
if config.doApplyExternalSkyWcs and config.doReevaluateSkyWcs:
keepExposure = True
if config.useGlobalExternalSkyWcs:
self.inputs.remove("externalSkyWcsTractCatalog")
else:
self.inputs.remove("externalSkyWcsGlobalCatalog")
keepSkyMap = True
else:
self.inputs.remove("externalSkyWcsTractCatalog")
self.inputs.remove("externalSkyWcsGlobalCatalog")
if config.doApplyExternalPhotoCalib and config.doReevaluatePhotoCalib:
keepExposure = True
if config.useGlobalExternalPhotoCalib:
self.inputs.remove("externalPhotoCalibTractCatalog")
else:
self.inputs.remove("externalPhotoCalibGlobalCatalog")
keepSkyMap = True
else:
self.inputs.remove("externalPhotoCalibTractCatalog")
self.inputs.remove("externalPhotoCalibGlobalCatalog")
if not keepSkyMap:
del self.skyMap
if not keepExposure:
del self.exposure
class WriteRecalibratedSourceTableConfig(WriteSourceTableConfig,
pipelineConnections=WriteRecalibratedSourceTableConnections):
doReevaluatePhotoCalib = pexConfig.Field(
dtype=bool,
default=True,
doc=("Add or replace local photoCalib columns"),
)
doReevaluateSkyWcs = pexConfig.Field(
dtype=bool,
default=True,
doc=("Add or replace local WCS columns and update the coord columns, coord_ra and coord_dec"),
)
doApplyExternalPhotoCalib = pexConfig.Field(
dtype=bool,
default=False,
doc=("If and only if doReevaluatePhotoCalib, apply the photometric calibrations from an external ",
"algorithm such as FGCM or jointcal, else use the photoCalib already attached to the exposure."),
# TODO: remove on DM-39854.
deprecated="Deprecated along with the external PhotoCalib connections. Will be removed after v26.",
)
doApplyExternalSkyWcs = pexConfig.Field(
dtype=bool,
default=False,
doc=("if and only if doReevaluateSkyWcs, apply the WCS from an external algorithm such as jointcal, ",
"else use the wcs already attached to the exposure."),
# TODO: remove on DM-39854.
deprecated="Deprecated along with the external WCS connections. Will be removed after v26.",
)
useGlobalExternalPhotoCalib = pexConfig.Field(
dtype=bool,
default=False,
doc=("When using doApplyExternalPhotoCalib, use 'global' calibrations "
"that are not run per-tract. When False, use per-tract photometric "
"calibration files."),
# TODO: remove on DM-39854.
deprecated="Deprecated along with the external PhotoCalib connections. Will be removed after v26.",
)
useGlobalExternalSkyWcs = pexConfig.Field(
dtype=bool,
default=False,
doc=("When using doApplyExternalSkyWcs, use 'global' calibrations "
"that are not run per-tract. When False, use per-tract wcs "
"files."),
# TODO: remove on DM-39854.
deprecated="Deprecated along with the external WCS connections. Will be removed after v26.",
)
idGenerator = DetectorVisitIdGeneratorConfig.make_field()
def validate(self):
super().validate()
if self.doApplyExternalSkyWcs and not self.doReevaluateSkyWcs:
log.warning("doApplyExternalSkyWcs=True but doReevaluateSkyWcs=False"
"External SkyWcs will not be read or evaluated.")
if self.doApplyExternalPhotoCalib and not self.doReevaluatePhotoCalib:
log.warning("doApplyExternalPhotoCalib=True but doReevaluatePhotoCalib=False."
"External PhotoCalib will not be read or evaluated.")
class WriteRecalibratedSourceTableTask(WriteSourceTableTask):
_DefaultName = "writeRecalibratedSourceTable"
ConfigClass = WriteRecalibratedSourceTableConfig
def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
idGenerator = self.config.idGenerator.apply(butlerQC.quantum.dataId)
inputs['idGenerator'] = idGenerator
inputs['ccdVisitId'] = idGenerator.catalog_id
if self.config.doReevaluatePhotoCalib or self.config.doReevaluateSkyWcs:
if self.config.doApplyExternalPhotoCalib or self.config.doApplyExternalSkyWcs:
inputs['exposure'] = self.attachCalibs(inputRefs, **inputs)
else:
# Create an empty exposure that will hold the calibrations.
exposure = ExposureF()
detectorId = butlerQC.quantum.dataId["detector"]
inputs['exposure'] = self.prepareCalibratedExposure(
exposure=exposure,
detectorId=detectorId,
visitSummary=inputs["visitSummary"],
)
inputs['catalog'] = self.addCalibColumns(**inputs)
result = self.run(**inputs)
outputs = pipeBase.Struct(outputCatalog=result.table)
butlerQC.put(outputs, outputRefs)
# TODO: remove on DM-39854.
@deprecated("Deprecated in favor of exclusively using visit summaries; will be removed after v26.",
version="v26", category=FutureWarning)
def attachCalibs(self, inputRefs, skyMap, exposure, externalSkyWcsGlobalCatalog=None,
externalSkyWcsTractCatalog=None, externalPhotoCalibGlobalCatalog=None,
externalPhotoCalibTractCatalog=None, visitSummary=None, **kwargs):
Definition at line 481 of file postprocess.py.
lsst.pipe.tasks.postprocess.log = logging.getLogger(__name__)
Definition at line 61 of file postprocess.py.
lsst.pipe.tasks.postprocess.mapper = afwTable.SchemaMapper(catalog.schema)
Definition at line 727 of file postprocess.py.
lsst.pipe.tasks.postprocess.measurement = SingleFrameMeasurementTask(config=measureConfig, schema=schema)
Definition at line 733 of file postprocess.py.
lsst.pipe.tasks.postprocess.names
if visitSummary is not None:
row = visitSummary.find(detectorId)
if row is None:
raise RuntimeError(f"Visit summary for detector {detectorId} is unexpectedly missing.")
if (photoCalib := row.getPhotoCalib()) is None:
self.log.warning("Detector id %s has None for photoCalib in visit summary; "
"skipping reevaluation of photoCalib.", detectorId)
exposure.setPhotoCalib(None)
else:
exposure.setPhotoCalib(photoCalib)
if (skyWcs := row.getWcs()) is None:
self.log.warning("Detector id %s has None for skyWcs in visit summary; "
"skipping reevaluation of skyWcs.", detectorId)
exposure.setWcs(None)
else:
exposure.setWcs(skyWcs)
if externalPhotoCalibCatalog is not None:
# TODO: remove on DM-39854.
warnings.warn(
"Deprecated in favor of 'visitSummary'; will be removed after v26.",
FutureWarning,
stacklevel=find_outside_stacklevel("lsst.pipe.tasks.postprocessing"),
)
row = externalPhotoCalibCatalog.find(detectorId)
if row is None:
self.log.warning("Detector id %s not found in externalPhotoCalibCatalog; "
"Using original photoCalib.", detectorId)
else:
photoCalib = row.getPhotoCalib()
if photoCalib is None:
self.log.warning("Detector id %s has None for photoCalib in externalPhotoCalibCatalog; "
"Using original photoCalib.", detectorId)
else:
exposure.setPhotoCalib(photoCalib)
if externalSkyWcsCatalog is not None:
# TODO: remove on DM-39854.
warnings.warn(
"Deprecated in favor of 'visitSummary'; will be removed after v26.",
FutureWarning,
stacklevel=find_outside_stacklevel("lsst.pipe.tasks.postprocessing"),
)
row = externalSkyWcsCatalog.find(detectorId)
if row is None:
self.log.warning("Detector id %s not found in externalSkyWcsCatalog; "
"Using original skyWcs.", detectorId)
else:
skyWcs = row.getWcs()
if skyWcs is None:
self.log.warning("Detector id %s has None for skyWcs in externalSkyWcsCatalog; "
"Using original skyWcs.", detectorId)
else:
exposure.setWcs(skyWcs)
return exposure
def addCalibColumns(self, catalog, exposure, idGenerator, **kwargs):
Definition at line 715 of file postprocess.py.
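A minimal sketch of the SchemaMapper pattern that addCalibColumns uses to copy the input schema before the measurement task appends its local-calibration columns; the minimal schema and the doMap flag are illustrative assumptions:

import lsst.afw.table as afwTable

# Map every field of an input schema through to an output schema.
inputSchema = afwTable.SourceTable.makeMinimalSchema()
mapper = afwTable.SchemaMapper(inputSchema)
mapper.addMinimalSchema(inputSchema, True)  # True: map fields, not just add them
schema = mapper.getOutputSchema()

# A catalog on the output schema can then receive the extra columns
# added by SingleFrameMeasurementTask.
newCat = afwTable.SourceCatalog(schema)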
lsst.pipe.tasks.postprocess.newCat = afwTable.SourceCatalog(schema)
Definition at line 735 of file postprocess.py.
lsst.pipe.tasks.postprocess.patch : `str`
Definition at line 173 of file postprocess.py.
lsst.pipe.tasks.postprocess.pcPlugin = measurement.plugins["base_LocalPhotoCalib"]
Definition at line 749 of file postprocess.py.
lsst.pipe.tasks.postprocess.pluginsNotToCopy = tuple(measureConfig.plugins.names)
Definition at line 722 of file postprocess.py.
lsst.pipe.tasks.postprocess.result : `~lsst.pipe.base.Struct`
Definition at line 255 of file postprocess.py.
lsst.pipe.tasks.postprocess.schema = mapper.getOutputSchema()
Definition at line 732 of file postprocess.py.
lsst.pipe.tasks.postprocess.skyMap : `~lsst.skymap.BaseSkyMap`
Definition at line 483 of file postprocess.py.
lsst.pipe.tasks.postprocess.tract : `int`
Definition at line 171 of file postprocess.py.
lsst.pipe.tasks.postprocess.tracts : `list` [`int`]
if not self.config.doApplyExternalSkyWcs:
# Do not modify the exposure's SkyWcs
externalSkyWcsCatalog = None
elif self.config.useGlobalExternalSkyWcs:
# Use the global external SkyWcs
externalSkyWcsCatalog = externalSkyWcsGlobalCatalog
self.log.info('Applying global SkyWcs')
else:
# use tract-level external SkyWcs from the closest overlapping tract
inputRef = getattr(inputRefs, 'externalSkyWcsTractCatalog')
tracts = [ref.dataId['tract'] for ref in inputRef]
if len(tracts) == 1:
ind = 0
self.log.info('Applying tract-level SkyWcs from tract %s', tracts[ind])
else:
if exposure.getWcs() is None: # TODO: could this look-up use the externalPhotoCalib?
raise ValueError("Trying to locate nearest tract, but exposure.wcs is None.")
ind = self.getClosestTract(tracts, skyMap,
exposure.getBBox(), exposure.getWcs())
self.log.info('Multiple overlapping externalSkyWcsTractCatalogs found (%s). '
'Applying closest to detector center: tract=%s', str(tracts), tracts[ind])
externalSkyWcsCatalog = externalSkyWcsTractCatalog[ind]
if not self.config.doApplyExternalPhotoCalib:
# Do not modify the exposure's PhotoCalib
externalPhotoCalibCatalog = None
elif self.config.useGlobalExternalPhotoCalib:
# Use the global external PhotoCalib
externalPhotoCalibCatalog = externalPhotoCalibGlobalCatalog
self.log.info('Applying global PhotoCalib')
else:
# use tract-level external PhotoCalib from the closest overlapping tract
inputRef = getattr(inputRefs, 'externalPhotoCalibTractCatalog')
tracts = [ref.dataId['tract'] for ref in inputRef]
if len(tracts) == 1:
ind = 0
self.log.info('Applying tract-level PhotoCalib from tract %s', tracts[ind])
else:
ind = self.getClosestTract(tracts, skyMap,
exposure.getBBox(), exposure.getWcs())
self.log.info('Multiple overlapping externalPhotoCalibTractCatalogs found (%s). '
'Applying closest to detector center: tract=%s', str(tracts), tracts[ind])
externalPhotoCalibCatalog = externalPhotoCalibTractCatalog[ind]
return self.prepareCalibratedExposure(
exposure=exposure,
detectorId=exposure.getDetector().getId(),  # the calexp carries its detector
externalSkyWcsCatalog=externalSkyWcsCatalog,
externalPhotoCalibCatalog=externalPhotoCalibCatalog,
visitSummary=visitSummary,
)
# TODO: remove on DM-39854.
@deprecated("Deprecated in favor of exclusively using visit summaries; will be removed after v26.",
version="v26", category=FutureWarning)
def getClosestTract(self, tracts, skyMap, bbox, wcs):
Definition at line 565 of file postprocess.py.
lsst.pipe.tasks.postprocess.visit : `int`
_DefaultName = "transformObjectCatalog"
ConfigClass = TransformObjectCatalogConfig
def run(self, handle, funcs=None, dataId=None, band=None):
# NOTE: band kwarg is ignored here.
dfDict = {}
analysisDict = {}
templateDf = pd.DataFrame()
columns = handle.get(component='columns')
inputBands = columns.unique(level=1).values
outputBands = self.config.outputBands if self.config.outputBands else inputBands
# Perform transform for data of filters that exist in the handle dataframe.
for inputBand in inputBands:
if inputBand not in outputBands:
self.log.info("Ignoring %s band data in the input", inputBand)
continue
self.log.info("Transforming the catalog of band %s", inputBand)
result = self.transform(inputBand, handle, funcs, dataId)
dfDict[inputBand] = result.df
analysisDict[inputBand] = result.analysis
if templateDf.empty:
templateDf = result.df
# Put filler values in columns of other wanted bands
for filt in outputBands:
if filt not in dfDict:
self.log.info("Adding empty columns for band %s", filt)
dfTemp = templateDf.copy()
for col in dfTemp.columns:
testValue = dfTemp[col].values[0]
if isinstance(testValue, (np.bool_, pd.BooleanDtype)):
# Boolean flag type, check if it is a "good" flag
if col in self.config.goodFlags:
fillValue = False
else:
fillValue = True
elif isinstance(testValue, numbers.Integral):
# Checking numbers.Integral catches all flavors
# of python, numpy, pandas, etc. integers.
# We must ensure this is not an unsigned integer.
if isinstance(testValue, np.unsignedinteger):
raise ValueError("Parquet tables may not have unsigned integer columns.")
else:
fillValue = self.config.integerFillValue
else:
fillValue = self.config.floatFillValue
dfTemp[col].values[:] = fillValue
dfDict[filt] = dfTemp
# This makes a multilevel column index, with band as first level
df = pd.concat(dfDict, axis=1, names=['band', 'column'])
if not self.config.multilevelOutput:
noDupCols = list(set.union(*[set(v.noDupCols) for v in analysisDict.values()]))
if self.config.primaryKey in noDupCols:
noDupCols.remove(self.config.primaryKey)
if dataId and self.config.columnsFromDataId:
noDupCols += self.config.columnsFromDataId
df = flattenFilters(df, noDupCols=noDupCols, camelCase=self.config.camelCase,
inputBands=inputBands)
self.log.info("Made a table of %d columns and %d rows", len(df.columns), len(df))
return df
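The dict-of-frames concatenation near the end of run() is plain pandas; a minimal sketch with placeholder columns:

import pandas as pd

# Hypothetical per-band frames keyed by band, as built in run().
dfDict = {
    'g': pd.DataFrame({'psfFlux': [1.0], 'extendedness': [0.0]}),
    'r': pd.DataFrame({'psfFlux': [2.0], 'extendedness': [1.0]}),
}

# Passing a dict to pd.concat with axis=1 prepends the keys as the first
# column level, producing the (band, column) MultiIndex used above.
df = pd.concat(dfDict, axis=1, names=['band', 'column'])
print(df.columns.tolist())
# [('g', 'psfFlux'), ('g', 'extendedness'), ('r', 'psfFlux'), ('r', 'extendedness')]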
class ConsolidateObjectTableConnections(pipeBase.PipelineTaskConnections,
dimensions=("tract", "skymap")):
inputCatalogs = connectionTypes.Input(
doc="Per-Patch objectTables conforming to the standard data model.",
name="objectTable",
storageClass="DataFrame",
dimensions=("tract", "patch", "skymap"),
multiple=True,
)
outputCatalog = connectionTypes.Output(
doc="Pre-tract horizontal concatenation of the input objectTables",
name="objectTable_tract",
storageClass="DataFrame",
dimensions=("tract", "skymap"),
)
class ConsolidateObjectTableConfig(pipeBase.PipelineTaskConfig,
pipelineConnections=ConsolidateObjectTableConnections):
coaddName = pexConfig.Field(
dtype=str,
default="deep",
doc="Name of coadd"
)
class ConsolidateObjectTableTask(pipeBase.PipelineTask):
_DefaultName = "consolidateObjectTable"
ConfigClass = ConsolidateObjectTableConfig
inputDataset = 'objectTable'
outputDataset = 'objectTable_tract'
def runQuantum(self, butlerQC, inputRefs, outputRefs):
inputs = butlerQC.get(inputRefs)
self.log.info("Concatenating %s per-patch Object Tables",
len(inputs['inputCatalogs']))
df = pd.concat(inputs['inputCatalogs'])
butlerQC.put(pipeBase.Struct(outputCatalog=df), outputRefs)
class TransformSourceTableConnections(pipeBase.PipelineTaskConnections,
defaultTemplates={"catalogType": ""},
dimensions=("instrument", "visit", "detector")):
inputCatalog = connectionTypes.Input(
doc="Wide input catalog of sources produced by WriteSourceTableTask",
name="{catalogType}source",
storageClass="DataFrame",
dimensions=("instrument", "visit", "detector"),
deferLoad=True
)
outputCatalog = connectionTypes.Output(
doc="Narrower, per-detector Source Table transformed and converted per a "
"specified set of functors",
name="{catalogType}sourceTable",
storageClass="DataFrame",
dimensions=("instrument", "visit", "detector")
)
class TransformSourceTableConfig(TransformCatalogBaseConfig,
pipelineConnections=TransformSourceTableConnections):
def setDefaults(self):
super().setDefaults()
self.functorFile = os.path.join('$PIPE_TASKS_DIR', 'schemas', 'Source.yaml')
self.primaryKey = 'sourceId'
self.columnsFromDataId = ['visit', 'detector', 'band', 'physical_filter']
class TransformSourceTableTask(TransformCatalogBaseTask):
_DefaultName = "transformSourceTable"
ConfigClass = TransformSourceTableConfig
class ConsolidateVisitSummaryConnections(pipeBase.PipelineTaskConnections,
dimensions=("instrument", "visit",),
defaultTemplates={"calexpType": ""}):
calexp = connectionTypes.Input(
doc="Processed exposures used for metadata",
name="calexp",
storageClass="ExposureF",
dimensions=("instrument", "visit", "detector"),
deferLoad=True,
multiple=True,
)
visitSummary = connectionTypes.Output(
doc=("Per-visit consolidated exposure metadata. These catalogs use "
"detector id for the id and are sorted for fast lookups of a "
"detector."),
name="visitSummary",
storageClass="ExposureCatalog",
dimensions=("instrument", "visit"),
)
visitSummarySchema = connectionTypes.InitOutput(
doc="Schema of the visitSummary catalog",
name="visitSummary_schema",
storageClass="ExposureCatalog",
)
class ConsolidateVisitSummaryConfig(pipeBase.PipelineTaskConfig,
pipelineConnections=ConsolidateVisitSummaryConnections):
pass
class ConsolidateVisitSummaryTask(pipeBase.PipelineTask):
_DefaultName = "consolidateVisitSummary"
ConfigClass = ConsolidateVisitSummaryConfig
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.schema = afwTable.ExposureTable.makeMinimalSchema()
self.schema.addField('visit', type='L', doc='Visit number')
self.schema.addField('physical_filter', type='String', size=32, doc='Physical filter')
self.schema.addField('band', type='String', size=32, doc='Name of band')
ExposureSummaryStats.update_schema(self.schema)
self.visitSummarySchema = afwTable.ExposureCatalog(self.schema)
def runQuantum(self, butlerQC, inputRefs, outputRefs):
dataRefs = butlerQC.get(inputRefs.calexp)
visit = dataRefs[0].dataId['visit']
self.log.debug("Concatenating metadata from %d per-detector calexps (visit %d)",
len(dataRefs), visit)
expCatalog = self._combineExposureMetadata(visit, dataRefs)
butlerQC.put(expCatalog, outputRefs.visitSummary)
def _combineExposureMetadata(self, visit, dataRefs):
Definition at line 1419 of file postprocess.py.
lsst.pipe.tasks.postprocess.visitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
ccdEntries = []
for visitSummaryRef in visitSummaryRefs:
visitSummary = visitSummaryRef.get()
visitInfo = visitSummary[0].getVisitInfo()
ccdEntry = {}
summaryTable = visitSummary.asAstropy()
selectColumns = ['id', 'visit', 'physical_filter', 'band', 'ra', 'dec', 'zenithDistance',
'zeroPoint', 'psfSigma', 'skyBg', 'skyNoise',
'astromOffsetMean', 'astromOffsetStd', 'nPsfStar',
'psfStarDeltaE1Median', 'psfStarDeltaE2Median',
'psfStarDeltaE1Scatter', 'psfStarDeltaE2Scatter',
'psfStarDeltaSizeMedian', 'psfStarDeltaSizeScatter',
'psfStarScaledDeltaSizeScatter',
'psfTraceRadiusDelta', 'maxDistToNearestPsf',
'effTime', 'effTimePsfSigmaScale',
'effTimeSkyBgScale', 'effTimeZeroPointScale']
ccdEntry = summaryTable[selectColumns].to_pandas().set_index('id')
# 'visit' is the human readable visit number.
# 'visitId' is the key to the visitId table. They are the same.
# Technically you should join to get the visit from the visit
# table.
ccdEntry = ccdEntry.rename(columns={"visit": "visitId"})
# RFC-924: Temporarily keep a duplicate "decl" entry for backwards
# compatibility. To be removed after September 2023.
ccdEntry["decl"] = ccdEntry.loc[:, "dec"]
ccdEntry['ccdVisitId'] = [
self.config.idGenerator.apply(
visitSummaryRef.dataId,
detector=detector_id,
is_exposure=False,
).catalog_id # The "catalog ID" here is the ccdVisit ID
# because it's usually the ID for a whole catalog
# with a {visit, detector}, and that's the main
# use case for IdGenerator. This usage for a
# summary table is rare.
for detector_id in summaryTable['id']
]
ccdEntry['detector'] = summaryTable['id']
pixToArcseconds = np.array([vR.getWcs().getPixelScale().asArcseconds() if vR.getWcs()
else np.nan for vR in visitSummary])
ccdEntry["seeing"] = visitSummary['psfSigma'] * np.sqrt(8 * np.log(2)) * pixToArcseconds
ccdEntry["skyRotation"] = visitInfo.getBoresightRotAngle().asDegrees()
ccdEntry["expMidpt"] = visitInfo.getDate().toPython()
ccdEntry["expMidptMJD"] = visitInfo.getDate().get(dafBase.DateTime.MJD)
expTime = visitInfo.getExposureTime()
ccdEntry['expTime'] = expTime
ccdEntry["obsStart"] = ccdEntry["expMidpt"] - 0.5 * pd.Timedelta(seconds=expTime)
expTime_days = expTime / (60*60*24)
ccdEntry["obsStartMJD"] = ccdEntry["expMidptMJD"] - 0.5 * expTime_days
ccdEntry['darkTime'] = visitInfo.getDarkTime()
ccdEntry['xSize'] = summaryTable['bbox_max_x'] - summaryTable['bbox_min_x']
ccdEntry['ySize'] = summaryTable['bbox_max_y'] - summaryTable['bbox_min_y']
ccdEntry['llcra'] = summaryTable['raCorners'][:, 0]
ccdEntry['llcdec'] = summaryTable['decCorners'][:, 0]
ccdEntry['ulcra'] = summaryTable['raCorners'][:, 1]
ccdEntry['ulcdec'] = summaryTable['decCorners'][:, 1]
ccdEntry['urcra'] = summaryTable['raCorners'][:, 2]
ccdEntry['urcdec'] = summaryTable['decCorners'][:, 2]
ccdEntry['lrcra'] = summaryTable['raCorners'][:, 3]
ccdEntry['lrcdec'] = summaryTable['decCorners'][:, 3]
# TODO: DM-30618, Add raftName, nExposures, ccdTemp, binX, binY,
# and flags, and decide if WCS, and llcx, llcy, ulcx, ulcy, etc.
# values are actually wanted.
ccdEntries.append(ccdEntry)
outputCatalog = pd.concat(ccdEntries)
outputCatalog.set_index('ccdVisitId', inplace=True, verify_integrity=True)
return pipeBase.Struct(outputCatalog=outputCatalog)
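The `seeing` column above applies the standard Gaussian sigma-to-FWHM factor sqrt(8 ln 2) ≈ 2.355 and scales to arcseconds; a worked example with placeholder numbers:

import numpy as np

# FWHM = 2 * sqrt(2 * ln 2) * sigma = sqrt(8 * ln 2) * sigma.
psfSigma = 2.0     # Gaussian sigma in pixels (placeholder)
pixelScale = 0.2   # arcsec per pixel (placeholder)
seeing = psfSigma * np.sqrt(8 * np.log(2)) * pixelScale
print(round(seeing, 3))  # 0.942 arcsec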
class MakeVisitTableConnections(pipeBase.PipelineTaskConnections,
dimensions=("instrument",),
defaultTemplates={"calexpType": ""}):
visitSummaries = connectionTypes.Input(
doc="Per-visit consolidated exposure metadata",
name="finalVisitSummary",
storageClass="ExposureCatalog",
dimensions=("instrument", "visit",),
multiple=True,
deferLoad=True,
)
outputCatalog = connectionTypes.Output(
doc="Visit metadata table",
name="visitTable",
storageClass="DataFrame",
dimensions=("instrument",)
)
class MakeVisitTableConfig(pipeBase.PipelineTaskConfig,
pipelineConnections=MakeVisitTableConnections):
pass
class MakeVisitTableTask(pipeBase.PipelineTask):
_DefaultName = 'makeVisitTable'
ConfigClass = MakeVisitTableConfig
def run(self, visitSummaries):
Definition at line 1668 of file postprocess.py.
lsst.pipe.tasks.postprocess.visitSummary : `lsst.afw.table.ExposureCatalog`, optional
Definition at line 495 of file postprocess.py.
lsst.pipe.tasks.postprocess.visitSummaryRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`
cat = afwTable.ExposureCatalog(self.schema)
cat.resize(len(dataRefs))
cat['visit'] = visit
for i, dataRef in enumerate(dataRefs):
visitInfo = dataRef.get(component='visitInfo')
filterLabel = dataRef.get(component='filter')
summaryStats = dataRef.get(component='summaryStats')
detector = dataRef.get(component='detector')
wcs = dataRef.get(component='wcs')
photoCalib = dataRef.get(component='photoCalib')
bbox = dataRef.get(component='bbox')
validPolygon = dataRef.get(component='validPolygon')
rec = cat[i]
rec.setBBox(bbox)
rec.setVisitInfo(visitInfo)
rec.setWcs(wcs)
rec.setPhotoCalib(photoCalib)
rec.setValidPolygon(validPolygon)
rec['physical_filter'] = filterLabel.physicalLabel if filterLabel.hasPhysicalLabel() else ""
rec['band'] = filterLabel.bandLabel if filterLabel.hasBandLabel() else ""
rec.setId(detector.getId())
summaryStats.update_record(rec)
metadata = dafBase.PropertyList()
metadata.add("COMMENT", "Catalog id is detector id, sorted.")
# We are looping over existing datarefs, so the following is true
metadata.add("COMMENT", "Only detectors with data have entries.")
cat.setMetadata(metadata)
cat.sort()
return cat
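A sketch of consuming the sorted catalog this task writes; the repository path, collection, and dataId values are placeholders:

from lsst.daf.butler import Butler

butler = Butler("REPO_PATH", collections=["COLLECTION"])
visitSummary = butler.get("visitSummary", instrument="INSTRUMENT", visit=1234)

# The catalog id is the detector id and the catalog is sorted, so find()
# is a fast lookup; it returns None when the detector is absent.
row = visitSummary.find(42)
if row is not None:
    print(row["physical_filter"], row.getWcs() is not None)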
class ConsolidateSourceTableConnections(pipeBase.PipelineTaskConnections,
defaultTemplates={"catalogType": ""},
dimensions=("instrument", "visit")):
inputCatalogs = connectionTypes.Input(
doc="Input per-detector Source Tables",
name="{catalogType}sourceTable",
storageClass="DataFrame",
dimensions=("instrument", "visit", "detector"),
multiple=True
)
outputCatalog = connectionTypes.Output(
doc="Per-visit concatenation of Source Table",
name="{catalogType}sourceTable_visit",
storageClass="DataFrame",
dimensions=("instrument", "visit")
)
class ConsolidateSourceTableConfig(pipeBase.PipelineTaskConfig,
pipelineConnections=ConsolidateSourceTableConnections):
pass
class ConsolidateSourceTableTask(pipeBase.PipelineTask):
_DefaultName = 'consolidateSourceTable'
ConfigClass = ConsolidateSourceTableConfig
inputDataset = 'sourceTable'
outputDataset = 'sourceTable_visit'
def runQuantum(self, butlerQC, inputRefs, outputRefs):
from .makeWarp import reorderRefs
detectorOrder = [ref.dataId['detector'] for ref in inputRefs.inputCatalogs]
detectorOrder.sort()
inputRefs = reorderRefs(inputRefs, detectorOrder, dataIdKey='detector')
inputs = butlerQC.get(inputRefs)
self.log.info("Concatenating %s per-detector Source Tables",
len(inputs['inputCatalogs']))
df = pd.concat(inputs['inputCatalogs'])
butlerQC.put(pipeBase.Struct(outputCatalog=df), outputRefs)
class MakeCcdVisitTableConnections(pipeBase.PipelineTaskConnections,
dimensions=("instrument",),
defaultTemplates={"calexpType": ""}):
visitSummaryRefs = connectionTypes.Input(
doc="Data references for per-visit consolidated exposure metadata",
name="finalVisitSummary",
storageClass="ExposureCatalog",
dimensions=("instrument", "visit"),
multiple=True,
deferLoad=True,
)
outputCatalog = connectionTypes.Output(
doc="CCD and Visit metadata table",
name="ccdVisitTable",
storageClass="DataFrame",
dimensions=("instrument",)
)
class MakeCcdVisitTableConfig(pipeBase.PipelineTaskConfig,
pipelineConnections=MakeCcdVisitTableConnections):
idGenerator = DetectorVisitIdGeneratorConfig.make_field()
class MakeCcdVisitTableTask(pipeBase.PipelineTask):
_DefaultName = 'makeCcdVisitTable'
ConfigClass = MakeCcdVisitTableConfig
def run(self, visitSummaryRefs):
Definition at line 1547 of file postprocess.py.
lsst.pipe.tasks.postprocess.wcs : `~lsst.afw.geom.SkyWcs`
Definition at line 571 of file postprocess.py.
lsst.pipe.tasks.postprocess.wcsPlugin = measurement.plugins["base_LocalWcs"]
Definition at line 744 of file postprocess.py.