import lsst.afw.table as afwTable

from lsst.pex.config import Config, RangeField

__all__ = ["CullPeaksConfig"]
def _makeGetSchemaCatalogs(datasetSuffix):
    """Construct a getSchemaCatalogs instance method.

    These are identical for most of the classes here, so we'll consolidate
    the code here.

    datasetSuffix: Suffix of dataset name, e.g., "src" for "deepCoadd_src".
    """
    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task."""
        src = afwTable.SourceCatalog(self.schema)
        if hasattr(self, "algMetadata"):
            src.getTable().setMetadata(self.algMetadata)
        return {self.config.coaddName + "Coadd_" + datasetSuffix: src}
    return getSchemaCatalogs
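

# Illustrative sketch (not part of the original module): a minimal stand-in class
# showing how a task would adopt the method generated by _makeGetSchemaCatalogs.
# The class name and the "example" dataset suffix are made up for this demonstration;
# the generated method only requires ``self.schema``, ``self.config.coaddName``, and
# (optionally) ``self.algMetadata``.
class _ExampleSchemaCatalogsUser:
    """Minimal task-like object for demonstrating `_makeGetSchemaCatalogs`."""

    def __init__(self, schema, config):
        self.schema = schema  # an `lsst.afw.table.Schema` for the output catalog
        self.config = config  # any object with a ``coaddName`` attribute, e.g. "deep"

    # When called, returns {"<coaddName>Coadd_example": <empty SourceCatalog>}.
    getSchemaCatalogs = _makeGetSchemaCatalogs("example")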
48 """Configuration for culling garbage peaks after merging footprints.
50 Peaks may also be culled after detection or during deblending; this configuration object
51 only deals
with culling after merging Footprints.
53 These cuts are based on three quantities:
54 - nBands: the number of bands
in which the peak was detected
55 - peakRank: the position of the peak within its family, sorted
from brightest to faintest.
56 - peakRankNormalized: the peak rank divided by the total number of peaks
in the family.
58 The formula that identifie peaks to cull
is:
60 nBands < nBandsSufficient
61 AND (rank >= rankSufficient)
62 AND (rank >= rankConsider OR rank >= rankNormalizedConsider)
64 To disable peak culling, simply set nBandsSufficient=1.
    nBandsSufficient = RangeField(dtype=int, default=2, min=1,
                                  doc="Always keep peaks detected in this many bands")
    rankSufficient = RangeField(dtype=int, default=20, min=1,
                                doc="Always keep this many peaks in each family")
    rankConsidered = RangeField(dtype=int, default=30, min=1,
                                doc=("Keep peaks with less than this rank that also match the "
                                     "rankNormalizedConsidered condition."))
    rankNormalizedConsidered = RangeField(dtype=float, default=0.7, min=0.0,
                                          doc=("Keep peaks with less than this normalized rank that"
                                               " also match the rankConsidered condition."))