lsst.pipe.tasks g59f6dce098+83b7f37e51
Classes | Functions | Variables
lsst.pipe.tasks.postprocess Namespace Reference

Classes

class  PostprocessAnalysis
 
class  TransformCatalogBaseConfig
 
class  TransformCatalogBaseConnections
 
class  TransformCatalogBaseTask
 
class  TransformObjectCatalogConnections
 
class  WriteObjectTableConnections
 

Functions

def flattenFilters (df, noDupCols=['coord_ra', 'coord_dec'], camelCase=False, inputBands=None)
 

Variables

logging log = logging.getLogger(__name__)
 
dict catalogs : `dict`
 
int tract
 
str patch
 
pandas catalog : `pandas.DataFrame`
 
int ccdVisitId : `int`
 
lsst result : `lsst.pipe.base.Struct`
 
lsst inputRefs : `lsst.pipe.base.InputQuantizedConnection`, for dataIds of
 
lsst skyMap : `lsst.skymap.SkyMap`
 
lsst exposure : `lsst.afw.image.exposure.Exposure`
 
lsst externalSkyWcsGlobalCatalog : `lsst.afw.table.ExposureCatalog`, optional
 
lsst externalSkyWcsTractCatalog : `lsst.afw.table.ExposureCatalog`, optional
 
lsst externalPhotoCalibGlobalCatalog : `lsst.afw.table.ExposureCatalog`, optional
 
lsst externalPhotoCalibTractCatalog : `lsst.afw.table.ExposureCatalog`, optional
 
list tracts : `list` [`int`]
 
lsst bbox : `lsst.geom.Box2I`
 
lsst wcs : `lsst.afw.geom.SkyWcs`
 
int index : `int`
 
lsst externalSkyWcsCatalog : `lsst.afw.table.ExposureCatalog`, optional
 
 doApplyExternalSkyWcs
 
lsst externalPhotoCalibCatalog : `lsst.afw.table.ExposureCatalog`, optional
 
 doApplyExternalPhotoCalib
 
 names
 
tuple pluginsNotToCopy = tuple(measureConfig.plugins.names)
 
aliasMap = catalog.schema.getAliasMap()
 
afwTable mapper = afwTable.SchemaMapper(catalog.schema)
 
afwTable schema = mapper.getOutputSchema()
 
SingleFrameMeasurementTask measurement = SingleFrameMeasurementTask(config=measureConfig, schema=schema)
 
afwTable newCat = afwTable.SourceCatalog(schema)
 
 measCat
 
 exposureId
 
int visit : `int`
 
list dataRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`
 
lsst visitSummary : `lsst.afw.table.ExposureCatalog`
 
list visitSummaryRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`
 
list visitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
 

Function Documentation

◆ flattenFilters()

def lsst.pipe.tasks.postprocess.flattenFilters(df,
                                               noDupCols=['coord_ra', 'coord_dec'],
                                               camelCase=False,
                                               inputBands=None)
Flattens a dataframe with a multilevel column index.

Definition at line 63 of file postprocess.py.
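
A minimal, illustrative sketch of what the flattening does, assuming a two-level (band, column) input like the object tables built below. The real function additionally handles noDupCols (columns such as coord_ra that should appear only once rather than per band), optional camelCase renaming, and band selection via inputBands:

import numpy as np
import pandas as pd

# Toy table with a (band, column) MultiIndex, then a hand-rolled version
# of the flattening: one flat name per (band, column) pair.
cols = pd.MultiIndex.from_tuples(
    [("g", "psfFlux"), ("r", "psfFlux")], names=("band", "column"))
df = pd.DataFrame(np.ones((2, 2)), columns=cols)

flat = df.copy()
flat.columns = [f"{band}_{col}" for band, col in flat.columns]
print(flat.columns.tolist())  # ['g_psfFlux', 'r_psfFlux']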

Variable Documentation

◆ aliasMap

lsst.pipe.tasks.postprocess.aliasMap = catalog.schema.getAliasMap()

Definition at line 599 of file postprocess.py.

◆ bbox

lsst lsst.pipe.tasks.postprocess.bbox : `lsst.geom.Box2I`

Definition at line 488 of file postprocess.py.

◆ catalog

pandas lsst.pipe.tasks.postprocess.catalog : `pandas.DataFrame`
dfs = []
for filt, tableDict in catalogs.items():
    for dataset, table in tableDict.items():
        # Convert afwTable to pandas DataFrame
        df = table.asAstropy().to_pandas().set_index('id', drop=True)

        # Sort columns by name, to ensure matching schema among patches
        df = df.reindex(sorted(df.columns), axis=1)
        df['tractId'] = tract
        df['patchId'] = patch

        # Make columns a 3-level MultiIndex
        df.columns = pd.MultiIndex.from_tuples([(dataset, filt, c) for c in df.columns],
                                               names=('dataset', 'band', 'column'))
        dfs.append(df)

catalog = functools.reduce(lambda d1, d2: d1.join(d2), dfs)
return catalog


class WriteSourceTableConnections(pipeBase.PipelineTaskConnections,
                                  defaultTemplates={"catalogType": ""},
                                  dimensions=("instrument", "visit", "detector")):

    catalog = connectionTypes.Input(
        doc="Input full-depth catalog of sources produced by CalibrateTask",
        name="{catalogType}src",
        storageClass="SourceCatalog",
        dimensions=("instrument", "visit", "detector")
    )
    outputCatalog = connectionTypes.Output(
        doc="Catalog of sources, `src` in Parquet format. The 'id' column is "
            "replaced with an index; all other columns are unchanged.",
        name="{catalogType}source",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector")
    )


class WriteSourceTableConfig(pipeBase.PipelineTaskConfig,
                             pipelineConnections=WriteSourceTableConnections):
    pass


class WriteSourceTableTask(pipeBase.PipelineTask):
    _DefaultName = "writeSourceTable"
    ConfigClass = WriteSourceTableConfig

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        inputs['ccdVisitId'] = butlerQC.quantum.dataId.pack("visit_detector")
        result = self.run(**inputs).table
        outputs = pipeBase.Struct(outputCatalog=result.toDataFrame())
        butlerQC.put(outputs, outputRefs)

    def run(self, catalog, ccdVisitId=None, **kwargs):

Definition at line 176 of file postprocess.py.
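
The merge in the run() body above hinges on pandas MultiIndex columns. A minimal standalone sketch of the same pattern, with toy frames and a hypothetical 'flux' column in place of real merged catalogs:

import functools
import pandas as pd

# Tag each per-band, per-dataset frame with a 3-level
# (dataset, band, column) MultiIndex, then join on the shared object-id index.
df_g = pd.DataFrame({"flux": [1.0, 2.0]}, index=pd.Index([10, 11], name="id"))
df_r = pd.DataFrame({"flux": [3.0, 4.0]}, index=pd.Index([10, 11], name="id"))

dfs = []
for band, df in [("g", df_g), ("r", df_r)]:
    df = df.copy()
    df.columns = pd.MultiIndex.from_tuples(
        [("meas", band, c) for c in df.columns],
        names=("dataset", "band", "column"))
    dfs.append(df)

catalog = functools.reduce(lambda d1, d2: d1.join(d2), dfs)
print(catalog.columns.tolist())  # [('meas', 'g', 'flux'), ('meas', 'r', 'flux')]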

◆ catalogs

dict lsst.pipe.tasks.postprocess.catalogs : `dict`
_DefaultName = "writeObjectTable"
ConfigClass = WriteObjectTableConfig

# Names of table datasets to be merged
inputDatasets = ('forced_src', 'meas', 'ref')

# Tag of output dataset written by `MergeSourcesTask.write`
outputDataset = 'obj'

def runQuantum(self, butlerQC, inputRefs, outputRefs):
    inputs = butlerQC.get(inputRefs)

    measDict = {ref.dataId['band']: {'meas': cat} for ref, cat in
                zip(inputRefs.inputCatalogMeas, inputs['inputCatalogMeas'])}
    forcedSourceDict = {ref.dataId['band']: {'forced_src': cat} for ref, cat in
                        zip(inputRefs.inputCatalogForcedSrc, inputs['inputCatalogForcedSrc'])}

    catalogs = {}
    for band in measDict.keys():
        catalogs[band] = {'meas': measDict[band]['meas'],
                          'forced_src': forcedSourceDict[band]['forced_src'],
                          'ref': inputs['inputCatalogRef']}
    dataId = butlerQC.quantum.dataId
    df = self.run(catalogs=catalogs, tract=dataId['tract'], patch=dataId['patch'])
    outputs = pipeBase.Struct(outputCatalog=df)
    butlerQC.put(outputs, outputRefs)

def run(self, catalogs, tract, patch):

Definition at line 167 of file postprocess.py.
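
For reference, the nested `catalogs` mapping assembled by runQuantum above has one entry per band, each holding the three datasets to merge; the band-independent `ref` catalog is shared across bands. Schematically, with string placeholders standing in for real afwTable.SourceCatalog objects:

catalogs = {
    "g": {"meas": "<SourceCatalog>", "forced_src": "<SourceCatalog>", "ref": "<SourceCatalog>"},
    "r": {"meas": "<SourceCatalog>", "forced_src": "<SourceCatalog>", "ref": "<SourceCatalog>"},
}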

◆ ccdVisitId

int lsst.pipe.tasks.postprocess.ccdVisitId : `int`

Definition at line 244 of file postprocess.py.

◆ dataRefs

list lsst.pipe.tasks.postprocess.dataRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`

Definition at line 1292 of file postprocess.py.

◆ doApplyExternalPhotoCalib

lsst.pipe.tasks.postprocess.doApplyExternalPhotoCalib

Definition at line 523 of file postprocess.py.

◆ doApplyExternalSkyWcs

lsst.pipe.tasks.postprocess.doApplyExternalSkyWcs

Definition at line 519 of file postprocess.py.

◆ exposure

lsst lsst.pipe.tasks.postprocess.exposure : `lsst.afw.image.exposure.Exposure`
if len(tracts) == 1:
    return 0

center = wcs.pixelToSky(bbox.getCenter())
sep = []
for tractId in tracts:
    tract = skyMap[tractId]
    tractCenter = tract.getWcs().pixelToSky(tract.getBBox().getCenter())
    sep.append(center.separation(tractCenter))

return np.argmin(sep)

def prepareCalibratedExposure(self, exposure, externalSkyWcsCatalog=None, externalPhotoCalibCatalog=None):

Definition at line 415 of file postprocess.py.
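
getClosestTract, shown in part above, picks the tract whose center is nearest the detector center on the sky. A self-contained numpy sketch of the same idea, with toy (ra, dec) coordinates in place of a skyMap and WCS:

import numpy as np

def closest_index(center_radec_deg, tract_centers_deg):
    """Toy version: index of the tract center nearest on the sky."""
    def to_unit(ra, dec):
        ra, dec = np.radians(ra), np.radians(dec)
        return np.array([np.cos(dec) * np.cos(ra),
                         np.cos(dec) * np.sin(ra),
                         np.sin(dec)])
    c = to_unit(*center_radec_deg)
    seps = [np.arccos(np.clip(np.dot(c, to_unit(ra, dec)), -1.0, 1.0))
            for ra, dec in tract_centers_deg]
    return int(np.argmin(seps))

# The detector center at (10, 0) deg is closer to the second tract center.
print(closest_index((10.0, 0.0), [(40.0, 5.0), (12.0, 1.0)]))  # 1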

◆ exposureId

lsst.pipe.tasks.postprocess.exposureId

Definition at line 618 of file postprocess.py.

◆ externalPhotoCalibCatalog

lsst lsst.pipe.tasks.postprocess.externalPhotoCalibCatalog : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 521 of file postprocess.py.

◆ externalPhotoCalibGlobalCatalog

lsst lsst.pipe.tasks.postprocess.externalPhotoCalibGlobalCatalog : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 421 of file postprocess.py.

◆ externalPhotoCalibTractCatalog

lsst lsst.pipe.tasks.postprocess.externalPhotoCalibTractCatalog : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 423 of file postprocess.py.

◆ externalSkyWcsCatalog

lsst lsst.pipe.tasks.postprocess.externalSkyWcsCatalog : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 517 of file postprocess.py.

◆ externalSkyWcsGlobalCatalog

lsst lsst.pipe.tasks.postprocess.externalSkyWcsGlobalCatalog : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 417 of file postprocess.py.

◆ externalSkyWcsTractCatalog

lsst lsst.pipe.tasks.postprocess.externalSkyWcsTractCatalog : `lsst.afw.table.ExposureCatalog`, optional

Definition at line 419 of file postprocess.py.

◆ index

int lsst.pipe.tasks.postprocess.index : `int`

Definition at line 495 of file postprocess.py.

◆ inputRefs

lsst lsst.pipe.tasks.postprocess.inputRefs : `lsst.pipe.base.InputQuantizedConnection`, for dataIds of
self.log.info("Generating parquet table from src catalog ccdVisitId=%s", ccdVisitId)
df = catalog.asAstropy().to_pandas().set_index('id', drop=True)
df['ccdVisitId'] = ccdVisitId
return pipeBase.Struct(table=ParquetTable(dataFrame=df))


class WriteRecalibratedSourceTableConnections(WriteSourceTableConnections,
                                              defaultTemplates={"catalogType": "",
                                                                "skyWcsName": "gbdesAstrometricFit",
                                                                "photoCalibName": "fgcm"},
                                              dimensions=("instrument", "visit", "detector", "skymap")):
    skyMap = connectionTypes.Input(
        doc="skyMap needed to choose which tract-level calibrations to use when multiple available",
        name=BaseSkyMap.SKYMAP_DATASET_TYPE_NAME,
        storageClass="SkyMap",
        dimensions=("skymap",),
    )
    exposure = connectionTypes.Input(
        doc="Input exposure to perform photometry on.",
        name="calexp",
        storageClass="ExposureF",
        dimensions=["instrument", "visit", "detector"],
    )
    externalSkyWcsTractCatalog = connectionTypes.Input(
        doc=("Per-tract, per-visit wcs calibrations.  These catalogs use the detector "
             "id for the catalog id, sorted on id for fast lookup."),
        name="{skyWcsName}SkyWcsCatalog",
        storageClass="ExposureCatalog",
        dimensions=["instrument", "visit", "tract"],
        multiple=True
    )
    externalSkyWcsGlobalCatalog = connectionTypes.Input(
        doc=("Per-visit wcs calibrations computed globally (with no tract information). "
             "These catalogs use the detector id for the catalog id, sorted on id for "
             "fast lookup."),
        name="finalVisitSummary",
        storageClass="ExposureCatalog",
        dimensions=["instrument", "visit"],
    )
    externalPhotoCalibTractCatalog = connectionTypes.Input(
        doc=("Per-tract, per-visit photometric calibrations.  These catalogs use the "
             "detector id for the catalog id, sorted on id for fast lookup."),
        name="{photoCalibName}PhotoCalibCatalog",
        storageClass="ExposureCatalog",
        dimensions=["instrument", "visit", "tract"],
        multiple=True
    )
    externalPhotoCalibGlobalCatalog = connectionTypes.Input(
        doc=("Per-visit photometric calibrations computed globally (with no tract "
             "information).  These catalogs use the detector id for the catalog id, "
             "sorted on id for fast lookup."),
        name="finalVisitSummary",
        storageClass="ExposureCatalog",
        dimensions=["instrument", "visit"],
    )

    def __init__(self, *, config=None):
        super().__init__(config=config)
        # Same connection boilerplate as all other applications of
        # Global/Tract calibrations
        if config.doApplyExternalSkyWcs and config.doReevaluateSkyWcs:
            if config.useGlobalExternalSkyWcs:
                self.inputs.remove("externalSkyWcsTractCatalog")
            else:
                self.inputs.remove("externalSkyWcsGlobalCatalog")
        else:
            self.inputs.remove("externalSkyWcsTractCatalog")
            self.inputs.remove("externalSkyWcsGlobalCatalog")
        if config.doApplyExternalPhotoCalib and config.doReevaluatePhotoCalib:
            if config.useGlobalExternalPhotoCalib:
                self.inputs.remove("externalPhotoCalibTractCatalog")
            else:
                self.inputs.remove("externalPhotoCalibGlobalCatalog")
        else:
            self.inputs.remove("externalPhotoCalibTractCatalog")
            self.inputs.remove("externalPhotoCalibGlobalCatalog")


class WriteRecalibratedSourceTableConfig(WriteSourceTableConfig,
                                         pipelineConnections=WriteRecalibratedSourceTableConnections):

    doReevaluatePhotoCalib = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("Add or replace local photoCalib columns")
    )
    doReevaluateSkyWcs = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("Add or replace local WCS columns and update the coord columns, coord_ra and coord_dec")
    )
    doApplyExternalPhotoCalib = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("If and only if doReevaluatePhotoCalib, apply the photometric calibrations from an external "
             "algorithm such as FGCM or jointcal, else use the photoCalib already attached to the exposure."),
    )
    doApplyExternalSkyWcs = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("If and only if doReevaluateSkyWcs, apply the WCS from an external algorithm such as jointcal, "
             "else use the wcs already attached to the exposure."),
    )
    useGlobalExternalPhotoCalib = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("When using doApplyExternalPhotoCalib, use 'global' calibrations "
             "that are not run per-tract.  When False, use per-tract photometric "
             "calibration files.")
    )
    useGlobalExternalSkyWcs = pexConfig.Field(
        dtype=bool,
        default=True,
        doc=("When using doApplyExternalSkyWcs, use 'global' calibrations "
             "that are not run per-tract.  When False, use per-tract wcs "
             "files.")
    )

    def validate(self):
        super().validate()
        if self.doApplyExternalSkyWcs and not self.doReevaluateSkyWcs:
            log.warning("doApplyExternalSkyWcs=True but doReevaluateSkyWcs=False. "
                        "External SkyWcs will not be read or evaluated.")
        if self.doApplyExternalPhotoCalib and not self.doReevaluatePhotoCalib:
            log.warning("doApplyExternalPhotoCalib=True but doReevaluatePhotoCalib=False. "
                        "External PhotoCalib will not be read or evaluated.")


class WriteRecalibratedSourceTableTask(WriteSourceTableTask):
    _DefaultName = "writeRecalibratedSourceTable"
    ConfigClass = WriteRecalibratedSourceTableConfig

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        inputs['ccdVisitId'] = butlerQC.quantum.dataId.pack("visit_detector")
        inputs['exposureIdInfo'] = ExposureIdInfo.fromDataId(butlerQC.quantum.dataId, "visit_detector")

        if self.config.doReevaluatePhotoCalib or self.config.doReevaluateSkyWcs:
            if self.config.doApplyExternalPhotoCalib or self.config.doApplyExternalSkyWcs:
                inputs['exposure'] = self.attachCalibs(inputRefs, **inputs)

            inputs['catalog'] = self.addCalibColumns(**inputs)

        result = self.run(**inputs).table
        outputs = pipeBase.Struct(outputCatalog=result.toDataFrame())
        butlerQC.put(outputs, outputRefs)

    def attachCalibs(self, inputRefs, skyMap, exposure, externalSkyWcsGlobalCatalog=None,
                     externalSkyWcsTractCatalog=None, externalPhotoCalibGlobalCatalog=None,
                     externalPhotoCalibTractCatalog=None, **kwargs):

Definition at line 412 of file postprocess.py.
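
As the connections' __init__ above shows, only one external catalog connection per calibration type survives, and only when both the corresponding doReevaluate* and doApplyExternal* flags are set. A hypothetical config override (e.g. in a pipetask config file) that applies a per-tract external WCS while keeping the exposure's own photoCalib might look like:

# Hypothetical overrides for writeRecalibratedSourceTable; the field
# names come from WriteRecalibratedSourceTableConfig above.
config.doReevaluateSkyWcs = True
config.doApplyExternalSkyWcs = True
config.useGlobalExternalSkyWcs = False    # use per-tract external WCS

config.doReevaluatePhotoCalib = True
config.doApplyExternalPhotoCalib = False  # keep the exposure's own photoCalib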

◆ log

logging lsst.pipe.tasks.postprocess.log = logging.getLogger(__name__)

Definition at line 60 of file postprocess.py.

◆ mapper

lsst.pipe.tasks.postprocess.mapper = afwTable.SchemaMapper(catalog.schema)

Definition at line 600 of file postprocess.py.

◆ measCat

lsst.pipe.tasks.postprocess.measCat

Definition at line 618 of file postprocess.py.

◆ measurement

SingleFrameMeasurementTask lsst.pipe.tasks.postprocess.measurement = SingleFrameMeasurementTask(config=measureConfig, schema=schema)

Definition at line 606 of file postprocess.py.

◆ names

lsst.pipe.tasks.postprocess.names
detectorId = exposure.getInfo().getDetector().getId()

if externalPhotoCalibCatalog is not None:
    row = externalPhotoCalibCatalog.find(detectorId)
    if row is None:
        self.log.warning("Detector id %s not found in externalPhotoCalibCatalog; "
                         "Using original photoCalib.", detectorId)
    else:
        photoCalib = row.getPhotoCalib()
        if photoCalib is None:
            self.log.warning("Detector id %s has None for photoCalib in externalPhotoCalibCatalog; "
                             "Using original photoCalib.", detectorId)
        else:
            exposure.setPhotoCalib(photoCalib)

if externalSkyWcsCatalog is not None:
    row = externalSkyWcsCatalog.find(detectorId)
    if row is None:
        self.log.warning("Detector id %s not found in externalSkyWcsCatalog; "
                         "Using original skyWcs.", detectorId)
    else:
        skyWcs = row.getWcs()
        if skyWcs is None:
            self.log.warning("Detector id %s has None for skyWcs in externalSkyWcsCatalog; "
                             "Using original skyWcs.", detectorId)
        else:
            exposure.setWcs(skyWcs)

return exposure

def addCalibColumns(self, catalog, exposure, exposureIdInfo, **kwargs):

Definition at line 588 of file postprocess.py.

◆ newCat

lsst.pipe.tasks.postprocess.newCat = afwTable.SourceCatalog(schema)

Definition at line 608 of file postprocess.py.

◆ patch

str lsst.pipe.tasks.postprocess.patch

Definition at line 171 of file postprocess.py.

◆ pluginsNotToCopy

tuple lsst.pipe.tasks.postprocess.pluginsNotToCopy = tuple(measureConfig.plugins.names)

Definition at line 595 of file postprocess.py.

◆ result

lsst lsst.pipe.tasks.postprocess.result : `lsst.pipe.base.Struct`

Definition at line 249 of file postprocess.py.

◆ schema

afwTable lsst.pipe.tasks.postprocess.schema = mapper.getOutputSchema()

Definition at line 605 of file postprocess.py.

◆ skyMap

lsst lsst.pipe.tasks.postprocess.skyMap : `lsst.skymap.SkyMap`

Definition at line 414 of file postprocess.py.

◆ tract

int lsst.pipe.tasks.postprocess.tract

Definition at line 169 of file postprocess.py.

◆ tracts

list lsst.pipe.tasks.postprocess.tracts : `list` [`int`]
if not self.config.doApplyExternalSkyWcs:
    # Do not modify the exposure's SkyWcs
    externalSkyWcsCatalog = None
elif self.config.useGlobalExternalSkyWcs:
    # Use the global external SkyWcs
    externalSkyWcsCatalog = externalSkyWcsGlobalCatalog
    self.log.info('Applying global SkyWcs')
else:
    # Use the tract-level external SkyWcs from the closest overlapping tract
    inputRef = getattr(inputRefs, 'externalSkyWcsTractCatalog')
    tracts = [ref.dataId['tract'] for ref in inputRef]
    if len(tracts) == 1:
        ind = 0
        self.log.info('Applying tract-level SkyWcs from tract %s', tracts[ind])
    else:
        if exposure.getWcs() is None:  # TODO: could this look-up use the externalPhotoCalib?
            raise ValueError("Trying to locate nearest tract, but exposure.wcs is None.")
        ind = self.getClosestTract(tracts, skyMap,
                                   exposure.getBBox(), exposure.getWcs())
        self.log.info('Multiple overlapping externalSkyWcsTractCatalogs found (%s). '
                      'Applying closest to detector center: tract=%s', str(tracts), tracts[ind])

    externalSkyWcsCatalog = externalSkyWcsTractCatalog[ind]

if not self.config.doApplyExternalPhotoCalib:
    # Do not modify the exposure's PhotoCalib
    externalPhotoCalibCatalog = None
elif self.config.useGlobalExternalPhotoCalib:
    # Use the global external PhotoCalib
    externalPhotoCalibCatalog = externalPhotoCalibGlobalCatalog
    self.log.info('Applying global PhotoCalib')
else:
    # Use the tract-level external PhotoCalib from the closest overlapping tract
    inputRef = getattr(inputRefs, 'externalPhotoCalibTractCatalog')
    tracts = [ref.dataId['tract'] for ref in inputRef]
    if len(tracts) == 1:
        ind = 0
        self.log.info('Applying tract-level PhotoCalib from tract %s', tracts[ind])
    else:
        ind = self.getClosestTract(tracts, skyMap,
                                   exposure.getBBox(), exposure.getWcs())
        self.log.info('Multiple overlapping externalPhotoCalibTractCatalogs found (%s). '
                      'Applying closest to detector center: tract=%s', str(tracts), tracts[ind])

    externalPhotoCalibCatalog = externalPhotoCalibTractCatalog[ind]

return self.prepareCalibratedExposure(exposure, externalSkyWcsCatalog, externalPhotoCalibCatalog)

def getClosestTract(self, tracts, skyMap, bbox, wcs):

Definition at line 484 of file postprocess.py.

◆ visit

int lsst.pipe.tasks.postprocess.visit : `int`
_DefaultName = "transformObjectCatalog"
ConfigClass = TransformObjectCatalogConfig

def run(self, parq, funcs=None, dataId=None, band=None):
    # NOTE: band kwarg is ignored here.
    dfDict = {}
    analysisDict = {}
    templateDf = pd.DataFrame()

    if isinstance(parq, DeferredDatasetHandle):
        columns = parq.get(component='columns')
        inputBands = columns.unique(level=1).values
    else:
        inputBands = parq.columnLevelNames['band']

    outputBands = self.config.outputBands if self.config.outputBands else inputBands

    # Perform transform for data of filters that exist in parq.
    for inputBand in inputBands:
        if inputBand not in outputBands:
            self.log.info("Ignoring %s band data in the input", inputBand)
            continue
        self.log.info("Transforming the catalog of band %s", inputBand)
        result = self.transform(inputBand, parq, funcs, dataId)
        dfDict[inputBand] = result.df
        analysisDict[inputBand] = result.analysis
        if templateDf.empty:
            templateDf = result.df

    # Put filler values in columns of other wanted bands
    for filt in outputBands:
        if filt not in dfDict:
            self.log.info("Adding empty columns for band %s", filt)
            dfTemp = templateDf.copy()
            for col in dfTemp.columns:
                testValue = dfTemp[col].values[0]
                if isinstance(testValue, (np.bool_, pd.BooleanDtype)):
                    # Boolean flag type, check if it is a "good" flag
                    if col in self.config.goodFlags:
                        fillValue = False
                    else:
                        fillValue = True
                elif isinstance(testValue, numbers.Integral):
                    # Checking numbers.Integral catches all flavors
                    # of python, numpy, pandas, etc. integers.
                    # We must ensure this is not an unsigned integer.
                    if isinstance(testValue, np.unsignedinteger):
                        raise ValueError("Parquet tables may not have unsigned integer columns.")
                    else:
                        fillValue = self.config.integerFillValue
                else:
                    fillValue = self.config.floatFillValue
                dfTemp[col].values[:] = fillValue
            dfDict[filt] = dfTemp

    # This makes a multilevel column index, with band as first level
    df = pd.concat(dfDict, axis=1, names=['band', 'column'])

    if not self.config.multilevelOutput:
        noDupCols = list(set.union(*[set(v.noDupCols) for v in analysisDict.values()]))
        if self.config.primaryKey in noDupCols:
            noDupCols.remove(self.config.primaryKey)
        if dataId and self.config.columnsFromDataId:
            noDupCols += self.config.columnsFromDataId
        df = flattenFilters(df, noDupCols=noDupCols, camelCase=self.config.camelCase,
                            inputBands=inputBands)

    self.log.info("Made a table of %d columns and %d rows", len(df.columns), len(df))

    return df


class ConsolidateObjectTableConnections(pipeBase.PipelineTaskConnections,
                                        dimensions=("tract", "skymap")):
    inputCatalogs = connectionTypes.Input(
        doc="Per-Patch objectTables conforming to the standard data model.",
        name="objectTable",
        storageClass="DataFrame",
        dimensions=("tract", "patch", "skymap"),
        multiple=True,
    )
    outputCatalog = connectionTypes.Output(
        doc="Per-tract horizontal concatenation of the input objectTables",
        name="objectTable_tract",
        storageClass="DataFrame",
        dimensions=("tract", "skymap"),
    )


class ConsolidateObjectTableConfig(pipeBase.PipelineTaskConfig,
                                   pipelineConnections=ConsolidateObjectTableConnections):
    coaddName = pexConfig.Field(
        dtype=str,
        default="deep",
        doc="Name of coadd"
    )


class ConsolidateObjectTableTask(pipeBase.PipelineTask):
    _DefaultName = "consolidateObjectTable"
    ConfigClass = ConsolidateObjectTableConfig

    inputDataset = 'objectTable'
    outputDataset = 'objectTable_tract'

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        inputs = butlerQC.get(inputRefs)
        self.log.info("Concatenating %s per-patch Object Tables",
                      len(inputs['inputCatalogs']))
        df = pd.concat(inputs['inputCatalogs'])
        butlerQC.put(pipeBase.Struct(outputCatalog=df), outputRefs)


class TransformSourceTableConnections(pipeBase.PipelineTaskConnections,
                                      defaultTemplates={"catalogType": ""},
                                      dimensions=("instrument", "visit", "detector")):

    inputCatalog = connectionTypes.Input(
        doc="Wide input catalog of sources produced by WriteSourceTableTask",
        name="{catalogType}source",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
        deferLoad=True
    )
    outputCatalog = connectionTypes.Output(
        doc="Narrower, per-detector Source Table transformed and converted per a "
            "specified set of functors",
        name="{catalogType}sourceTable",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector")
    )


class TransformSourceTableConfig(TransformCatalogBaseConfig,
                                 pipelineConnections=TransformSourceTableConnections):

    def setDefaults(self):
        super().setDefaults()
        self.functorFile = os.path.join('$PIPE_TASKS_DIR', 'schemas', 'Source.yaml')
        self.primaryKey = 'sourceId'
        self.columnsFromDataId = ['visit', 'detector', 'band', 'physical_filter']


class TransformSourceTableTask(TransformCatalogBaseTask):
    _DefaultName = "transformSourceTable"
    ConfigClass = TransformSourceTableConfig


class ConsolidateVisitSummaryConnections(pipeBase.PipelineTaskConnections,
                                         dimensions=("instrument", "visit",),
                                         defaultTemplates={"calexpType": ""}):
    calexp = connectionTypes.Input(
        doc="Processed exposures used for metadata",
        name="calexp",
        storageClass="ExposureF",
        dimensions=("instrument", "visit", "detector"),
        deferLoad=True,
        multiple=True,
    )
    visitSummary = connectionTypes.Output(
        doc=("Per-visit consolidated exposure metadata.  These catalogs use "
             "detector id for the id and are sorted for fast lookups of a "
             "detector."),
        name="visitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
    )
    visitSummarySchema = connectionTypes.InitOutput(
        doc="Schema of the visitSummary catalog",
        name="visitSummary_schema",
        storageClass="ExposureCatalog",
    )


class ConsolidateVisitSummaryConfig(pipeBase.PipelineTaskConfig,
                                    pipelineConnections=ConsolidateVisitSummaryConnections):
    pass


class ConsolidateVisitSummaryTask(pipeBase.PipelineTask):
    _DefaultName = "consolidateVisitSummary"
    ConfigClass = ConsolidateVisitSummaryConfig

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.schema = afwTable.ExposureTable.makeMinimalSchema()
        self.schema.addField('visit', type='L', doc='Visit number')
        self.schema.addField('physical_filter', type='String', size=32, doc='Physical filter')
        self.schema.addField('band', type='String', size=32, doc='Name of band')
        ExposureSummaryStats.update_schema(self.schema)
        self.visitSummarySchema = afwTable.ExposureCatalog(self.schema)

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        dataRefs = butlerQC.get(inputRefs.calexp)
        visit = dataRefs[0].dataId.byName()['visit']

        self.log.debug("Concatenating metadata from %d per-detector calexps (visit %d)",
                       len(dataRefs), visit)

        expCatalog = self._combineExposureMetadata(visit, dataRefs)

        butlerQC.put(expCatalog, outputRefs.visitSummary)

    def _combineExposureMetadata(self, visit, dataRefs):

Definition at line 1290 of file postprocess.py.
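
The fill logic in the run() body above chooses per-dtype sentinels for output bands that have no input data. A standalone pandas sketch of those rules, with hypothetical column names; goodFlags, integerFillValue and floatFillValue mirror the config fields:

import numpy as np
import pandas as pd

# Flag columns default to True unless listed as "good" flags; integer
# columns get an integer sentinel; everything else a float sentinel.
goodFlags = {"calib_psf_used"}
integerFillValue, floatFillValue = -1, np.nan

template = pd.DataFrame({
    "calib_psf_used": [True], "base_PixelFlags_flag": [False],
    "deblend_nChild": [2], "psfFlux": [1.5],
})
filler = template.copy()
for col in filler.columns:
    testValue = filler[col].iloc[0]
    if isinstance(testValue, np.bool_):
        filler[col] = col not in goodFlags   # "good" flags fill False
    elif isinstance(testValue, (int, np.integer)):
        filler[col] = integerFillValue
    else:
        filler[col] = floatFillValue
print(filler)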

◆ visitSummaries

list lsst.pipe.tasks.postprocess.visitSummaries : `list` of `lsst.afw.table.ExposureCatalog`
ccdEntries = []
for visitSummaryRef in visitSummaryRefs:
    visitSummary = visitSummaryRef.get()
    visitInfo = visitSummary[0].getVisitInfo()

    summaryTable = visitSummary.asAstropy()
    selectColumns = ['id', 'visit', 'physical_filter', 'band', 'ra', 'decl', 'zenithDistance',
                     'zeroPoint', 'psfSigma', 'skyBg', 'skyNoise',
                     'astromOffsetMean', 'astromOffsetStd', 'nPsfStar',
                     'psfStarDeltaE1Median', 'psfStarDeltaE2Median',
                     'psfStarDeltaE1Scatter', 'psfStarDeltaE2Scatter',
                     'psfStarDeltaSizeMedian', 'psfStarDeltaSizeScatter',
                     'psfStarScaledDeltaSizeScatter',
                     'psfTraceRadiusDelta', 'maxDistToNearestPsf']
    ccdEntry = summaryTable[selectColumns].to_pandas().set_index('id')
    # 'visit' is the human readable visit number.
    # 'visitId' is the key to the visitId table. They are the same.
    # Technically you should join to get the visit from the visit
    # table.
    ccdEntry = ccdEntry.rename(columns={"visit": "visitId"})
    dataIds = [DataCoordinate.standardize(visitSummaryRef.dataId, detector=id) for id in
               summaryTable['id']]
    packer = visitSummaryRef.dataId.universe.makePacker('visit_detector', visitSummaryRef.dataId)
    ccdVisitIds = [packer.pack(dataId) for dataId in dataIds]
    ccdEntry['ccdVisitId'] = ccdVisitIds
    ccdEntry['detector'] = summaryTable['id']
    pixToArcseconds = np.array([vR.getWcs().getPixelScale().asArcseconds() if vR.getWcs()
                                else np.nan for vR in visitSummary])
    ccdEntry["seeing"] = visitSummary['psfSigma'] * np.sqrt(8 * np.log(2)) * pixToArcseconds

    ccdEntry["skyRotation"] = visitInfo.getBoresightRotAngle().asDegrees()
    ccdEntry["expMidpt"] = visitInfo.getDate().toPython()
    ccdEntry["expMidptMJD"] = visitInfo.getDate().get(dafBase.DateTime.MJD)
    expTime = visitInfo.getExposureTime()
    ccdEntry['expTime'] = expTime
    ccdEntry["obsStart"] = ccdEntry["expMidpt"] - 0.5 * pd.Timedelta(seconds=expTime)
    expTime_days = expTime / (60*60*24)
    ccdEntry["obsStartMJD"] = ccdEntry["expMidptMJD"] - 0.5 * expTime_days
    ccdEntry['darkTime'] = visitInfo.getDarkTime()
    ccdEntry['xSize'] = summaryTable['bbox_max_x'] - summaryTable['bbox_min_x']
    ccdEntry['ySize'] = summaryTable['bbox_max_y'] - summaryTable['bbox_min_y']
    ccdEntry['llcra'] = summaryTable['raCorners'][:, 0]
    ccdEntry['llcdec'] = summaryTable['decCorners'][:, 0]
    ccdEntry['ulcra'] = summaryTable['raCorners'][:, 1]
    ccdEntry['ulcdec'] = summaryTable['decCorners'][:, 1]
    ccdEntry['urcra'] = summaryTable['raCorners'][:, 2]
    ccdEntry['urcdec'] = summaryTable['decCorners'][:, 2]
    ccdEntry['lrcra'] = summaryTable['raCorners'][:, 3]
    ccdEntry['lrcdec'] = summaryTable['decCorners'][:, 3]
    # TODO: DM-30618, Add raftName, nExposures, ccdTemp, binX, binY,
    # and flags, and decide if WCS, and llcx, llcy, ulcx, ulcy, etc.
    # values are actually wanted.
    ccdEntries.append(ccdEntry)

outputCatalog = pd.concat(ccdEntries)
outputCatalog.set_index('ccdVisitId', inplace=True, verify_integrity=True)
return pipeBase.Struct(outputCatalog=outputCatalog)


class MakeVisitTableConnections(pipeBase.PipelineTaskConnections,
                                dimensions=("instrument",),
                                defaultTemplates={"calexpType": ""}):
    visitSummaries = connectionTypes.Input(
        doc="Per-visit consolidated exposure metadata",
        name="finalVisitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit",),
        multiple=True,
        deferLoad=True,
    )
    outputCatalog = connectionTypes.Output(
        doc="Visit metadata table",
        name="visitTable",
        storageClass="DataFrame",
        dimensions=("instrument",)
    )


class MakeVisitTableConfig(pipeBase.PipelineTaskConfig,
                           pipelineConnections=MakeVisitTableConnections):
    pass


class MakeVisitTableTask(pipeBase.PipelineTask):
    _DefaultName = 'makeVisitTable'
    ConfigClass = MakeVisitTableConfig

    def run(self, visitSummaries):

Definition at line 1525 of file postprocess.py.
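
The seeing column computed in the run() body above converts the PSF model width from a Gaussian sigma in pixels to a FWHM in arcseconds; the conversion factor is sqrt(8 ln 2) ≈ 2.355. A one-line check with illustrative numbers:

import numpy as np

# FWHM of a Gaussian is sigma * sqrt(8 ln 2); multiplying by the pixel
# scale expresses the PSF width ("seeing") in arcseconds, as done above.
SIGMA_TO_FWHM = np.sqrt(8 * np.log(2))  # ~2.3548

psfSigma = 1.7    # pixels, illustrative value
pixelScale = 0.2  # arcsec/pixel, illustrative value
seeing = psfSigma * SIGMA_TO_FWHM * pixelScale
print(f"{seeing:.2f} arcsec")  # ~0.80 arcsec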

◆ visitSummary

lsst lsst.pipe.tasks.postprocess.visitSummary : `lsst.afw.table.ExposureCatalog`

Definition at line 1297 of file postprocess.py.

◆ visitSummaryRefs

list lsst.pipe.tasks.postprocess.visitSummaryRefs : `list` of `lsst.daf.butler.DeferredDatasetHandle`
cat = afwTable.ExposureCatalog(self.schema)
cat.resize(len(dataRefs))

cat['visit'] = visit

for i, dataRef in enumerate(dataRefs):
    visitInfo = dataRef.get(component='visitInfo')
    filterLabel = dataRef.get(component='filter')
    summaryStats = dataRef.get(component='summaryStats')
    detector = dataRef.get(component='detector')
    wcs = dataRef.get(component='wcs')
    photoCalib = dataRef.get(component='photoCalib')
    bbox = dataRef.get(component='bbox')
    validPolygon = dataRef.get(component='validPolygon')

    rec = cat[i]
    rec.setBBox(bbox)
    rec.setVisitInfo(visitInfo)
    rec.setWcs(wcs)
    rec.setPhotoCalib(photoCalib)
    rec.setValidPolygon(validPolygon)

    rec['physical_filter'] = filterLabel.physicalLabel if filterLabel.hasPhysicalLabel() else ""
    rec['band'] = filterLabel.bandLabel if filterLabel.hasBandLabel() else ""
    rec.setId(detector.getId())
    summaryStats.update_record(rec)

metadata = dafBase.PropertyList()
metadata.add("COMMENT", "Catalog id is detector id, sorted.")
# We are looping over existing datarefs, so the following is true
metadata.add("COMMENT", "Only detectors with data have entries.")
cat.setMetadata(metadata)

cat.sort()
return cat


class ConsolidateSourceTableConnections(pipeBase.PipelineTaskConnections,
                                        defaultTemplates={"catalogType": ""},
                                        dimensions=("instrument", "visit")):
    inputCatalogs = connectionTypes.Input(
        doc="Input per-detector Source Tables",
        name="{catalogType}sourceTable",
        storageClass="DataFrame",
        dimensions=("instrument", "visit", "detector"),
        multiple=True
    )
    outputCatalog = connectionTypes.Output(
        doc="Per-visit concatenation of Source Table",
        name="{catalogType}sourceTable_visit",
        storageClass="DataFrame",
        dimensions=("instrument", "visit")
    )


class ConsolidateSourceTableConfig(pipeBase.PipelineTaskConfig,
                                   pipelineConnections=ConsolidateSourceTableConnections):
    pass


class ConsolidateSourceTableTask(pipeBase.PipelineTask):
    _DefaultName = 'consolidateSourceTable'
    ConfigClass = ConsolidateSourceTableConfig

    inputDataset = 'sourceTable'
    outputDataset = 'sourceTable_visit'

    def runQuantum(self, butlerQC, inputRefs, outputRefs):
        from .makeWarp import reorderRefs

        detectorOrder = [ref.dataId['detector'] for ref in inputRefs.inputCatalogs]
        detectorOrder.sort()
        inputRefs = reorderRefs(inputRefs, detectorOrder, dataIdKey='detector')
        inputs = butlerQC.get(inputRefs)
        self.log.info("Concatenating %s per-detector Source Tables",
                      len(inputs['inputCatalogs']))
        df = pd.concat(inputs['inputCatalogs'])
        butlerQC.put(pipeBase.Struct(outputCatalog=df), outputRefs)


class MakeCcdVisitTableConnections(pipeBase.PipelineTaskConnections,
                                   dimensions=("instrument",),
                                   defaultTemplates={"calexpType": ""}):
    visitSummaryRefs = connectionTypes.Input(
        doc="Data references for per-visit consolidated exposure metadata",
        name="finalVisitSummary",
        storageClass="ExposureCatalog",
        dimensions=("instrument", "visit"),
        multiple=True,
        deferLoad=True,
    )
    outputCatalog = connectionTypes.Output(
        doc="CCD and Visit metadata table",
        name="ccdVisitTable",
        storageClass="DataFrame",
        dimensions=("instrument",)
    )


class MakeCcdVisitTableConfig(pipeBase.PipelineTaskConfig,
                              pipelineConnections=MakeCcdVisitTableConnections):
    pass


class MakeCcdVisitTableTask(pipeBase.PipelineTask):
    _DefaultName = 'makeCcdVisitTable'
    ConfigClass = MakeCcdVisitTableConfig

    def run(self, visitSummaryRefs):

Definition at line 1418 of file postprocess.py.
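
_combineExposureMetadata, shown in part above, relies on the ExposureCatalog being sorted on id so that find(detectorId) is fast. A minimal sketch of that build-and-sort pattern (requires an LSST Science Pipelines installation; the visit number and detector ids are made up):

import lsst.afw.table as afwTable

schema = afwTable.ExposureTable.makeMinimalSchema()
schema.addField('visit', type='L', doc='Visit number')

cat = afwTable.ExposureCatalog(schema)
cat.resize(2)
cat['visit'] = 1234
cat[0].setId(31)
cat[1].setId(4)

cat.sort()                      # sort on id so find() can bisect
assert cat.find(4) is not None  # fast detector lookup by id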

◆ wcs

lsst lsst.pipe.tasks.postprocess.wcs : `lsst.afw.geom.SkyWcs`

Definition at line 490 of file postprocess.py.