lsst.pipe.tasks g1952d3b776+66328450fc
multiBandUtils.py
# This file is part of pipe_tasks.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import lsst.afw.table as afwTable

from lsst.coadd.utils import ExistingCoaddDataIdContainer
from lsst.coadd.utils.getGen3CoaddExposureId import getGen3CoaddExposureId
from lsst.pipe.base import TaskRunner, ArgumentParser
from lsst.pex.config import Config, RangeField
from lsst.obs.base import ExposureIdInfo


class MergeSourcesRunner(TaskRunner):
    """Task runner for `MergeDetectionsTask` and `MergeMeasurementsTask`.

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    def makeTask(self, parsedCmd=None, args=None):
        """Provide a butler to the Task constructor.

        Parameters
        ----------
        parsedCmd:
            The parsed command.
        args: tuple
            Tuple of a list of data references and kwargs (unused).

        Raises
        ------
        RuntimeError
            Raised if both `parsedCmd` and `args` are `None`.
        """
        if parsedCmd is not None:
            butler = parsedCmd.butler
        elif args is not None:
            dataRefList, kwargs = args
            butler = dataRefList[0].getButler()
        else:
            raise RuntimeError("Neither parsedCmd nor args specified")
        return self.TaskClass(config=self.config, log=self.log, butler=butler)

    @staticmethod
    def buildRefDict(parsedCmd):
        """Build a hierarchical dictionary of patch references.

        Parameters
        ----------
        parsedCmd:
            The parsed command.

        Returns
        -------
        refDict: dict
            A reference dictionary of the form {tract: {patch: {filter: dataRef}}}.

        Raises
        ------
        RuntimeError
            Raised when multiple references are provided for the same
            combination of tract, patch and filter.
        """
        refDict = {}  # Will index this as refDict[tract][patch][filter] = ref
        for ref in parsedCmd.id.refList:
            tract = ref.dataId["tract"]
            patch = ref.dataId["patch"]
            filter = ref.dataId["filter"]
            if tract not in refDict:
                refDict[tract] = {}
            if patch not in refDict[tract]:
                refDict[tract][patch] = {}
            if filter in refDict[tract][patch]:
                raise RuntimeError("Multiple versions of %s" % (ref.dataId,))
            refDict[tract][patch][filter] = ref
        return refDict

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of data references for each tract/patch combination,
        gathering the per-filter references for each patch into a single list.

        Parameters
        ----------
        parsedCmd:
            The parsed command.
        kwargs:
            Keyword arguments passed to the task.

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRefList, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]
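
# Illustrative sketch (not used by the runner): this hypothetical helper mirrors
# the flattening done in MergeSourcesRunner.getTargetList, turning the nested
# {tract: {patch: {filter: dataRef}}} structure built by buildRefDict into one
# (dataRefList, kwargs) target per tract/patch.
def _exampleFlattenRefDict(refDict, **kwargs):
    # One target per patch; the per-filter references are gathered into a list.
    return [(list(filters.values()), kwargs)
            for patches in refDict.values()
            for filters in patches.values()]
# For example, _exampleFlattenRefDict({9813: {"2,2": {"g": "refG", "r": "refR"}}})
# returns [(["refG", "refR"], {})].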


def _makeGetSchemaCatalogs(datasetSuffix):
    """Construct a getSchemaCatalogs instance method.

    These are identical for most of the classes here, so we'll consolidate
    the code.

    datasetSuffix: Suffix of dataset name, e.g., "src" for "deepCoadd_src"
    """

    def getSchemaCatalogs(self):
        """Return a dict of empty catalogs for each catalog dataset produced by this task."""
        src = afwTable.SourceCatalog(self.schema)
        if hasattr(self, "algMetadata"):
            src.getTable().setMetadata(self.algMetadata)
        return {self.config.coaddName + "Coadd_" + datasetSuffix: src}
    return getSchemaCatalogs
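
# Usage sketch: tasks in this module attach the generated method as a class
# attribute so the command-line framework can discover their output schemas.
# The class and the "det" dataset suffix below are hypothetical illustrations,
# not a reference to a specific pipe_tasks task.
class _ExampleSchemaCatalogsTaskSketch:
    # The generated method closes over the dataset suffix; when called it
    # builds an empty SourceCatalog from ``self.schema``.
    getSchemaCatalogs = _makeGetSchemaCatalogs("det")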


def makeMergeArgumentParser(name, dataset):
    """!
    @brief Create a suitable ArgumentParser.

    We will use the ArgumentParser to provide a list of data
    references for patches; the RunnerClass will sort them into lists
    of data references for the same patch.
    """
    parser = ArgumentParser(name)
    parser.add_id_argument("--id", "deepCoadd_" + dataset,
                           ContainerClass=ExistingCoaddDataIdContainer,
                           help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i")
    return parser
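
# Usage sketch: a Gen2 merge task would typically build its command-line parser
# along these lines.  The task name "mergeCoaddDetections" and the dataset
# suffix "det" are assumptions for illustration only.
def _exampleMakeArgumentParser():
    # Builds a parser whose --id argument selects existing deepCoadd_det
    # datasets, e.g. --id tract=12345 patch=1,2 filter=g^r^i
    return makeMergeArgumentParser("mergeCoaddDetections", "det")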


def getInputSchema(task, butler=None, schema=None):
    """!
    @brief Obtain the input schema either directly or from a butler reference.

    @param[in] task task whose coadd name and input dataset identify the schema dataset
    @param[in] butler butler reference to obtain the input schema from
    @param[in] schema the input schema
    """
    if schema is None:
        assert butler is not None, "Neither butler nor schema specified"
        schema = butler.get(task.config.coaddName + "Coadd_" + task.inputDataset + "_schema",
                            immediate=True).schema
    return schema
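
# Usage sketch: at construction time a merge task typically resolves its input
# schema once, using an explicit schema if one was supplied and falling back to
# the butler otherwise.  ``task`` and ``butler`` are hypothetical, already
# constructed objects.
def _exampleResolveSchema(task, butler):
    # Reads the "<coaddName>Coadd_<inputDataset>_schema" dataset when no schema
    # is passed in explicitly.
    return getInputSchema(task, butler=butler, schema=None)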


def readCatalog(task, patchRef):
    """!
    @brief Read input catalog.

    We read the input dataset provided by the 'inputDataset'
    class variable.

    @param[in] task task whose coadd name and input dataset identify the catalog
    @param[in] patchRef data reference for patch
    @return tuple consisting of the band name and the catalog
    """
    band = patchRef.get(task.config.coaddName + "Coadd_filterLabel", immediate=True).bandLabel
    catalog = patchRef.get(task.config.coaddName + "Coadd_" + task.inputDataset, immediate=True)
    task.log.info("Read %d sources for band %s: %s", len(catalog), band, patchRef.dataId)
    return band, catalog
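
# Usage sketch: a merge task's Gen2 entry point typically reads one catalog per
# band before merging.  ``task`` and ``patchRefList`` are hypothetical, already
# constructed objects (one data reference per filter for a single tract/patch).
def _exampleReadCatalogs(task, patchRefList):
    # Map band name -> input catalog for every per-band reference of the patch;
    # readCatalog returns (band, catalog) pairs, so dict() assembles the mapping.
    return dict(readCatalog(task, patchRef) for patchRef in patchRefList)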


class CullPeaksConfig(Config):
    """!
    @anchor CullPeaksConfig_

    @brief Configuration for culling garbage peaks after merging footprints.

    Peaks may also be culled after detection or during deblending; this configuration object
    only deals with culling after merging Footprints.

    These cuts are based on three quantities:
    - nBands: the number of bands in which the peak was detected
    - peakRank: the position of the peak within its family, sorted from brightest to faintest.
    - peakRankNormalized: the peak rank divided by the total number of peaks in the family.

    The formula that identifies peaks to cull is:

    nBands < nBandsSufficient
    AND (peakRank >= rankSufficient)
    AND (peakRank >= rankConsidered OR peakRankNormalized >= rankNormalizedConsidered)

    To disable peak culling, simply set nBandsSufficient=1.
    """

    nBandsSufficient = RangeField(dtype=int, default=2, min=1,
                                  doc="Always keep peaks detected in this many bands")
    rankSufficient = RangeField(dtype=int, default=20, min=1,
                                doc="Always keep this many peaks in each family")
    rankConsidered = RangeField(dtype=int, default=30, min=1,
                                doc=("Keep peaks with less than this rank that also match the "
                                     "rankNormalizedConsidered condition."))
    rankNormalizedConsidered = RangeField(dtype=float, default=0.7, min=0.0,
                                          doc=("Keep peaks with less than this normalized rank that"
                                               " also match the rankConsidered condition."))
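
# Hedged sketch of the culling rule described in the CullPeaksConfig docstring.
# The real cull is applied by the merge-detections code, not here; this
# standalone predicate is illustrative only.
def _exampleShouldCullPeak(config, nBands, peakRank, familySize):
    # A peak is culled only when it is detected in too few bands, is not among
    # the brightest rankSufficient peaks of its family, and is low-ranked
    # either absolutely or relative to the size of its family.
    peakRankNormalized = peakRank / familySize
    return (nBands < config.nBandsSufficient
            and peakRank >= config.rankSufficient
            and (peakRank >= config.rankConsidered
                 or peakRankNormalized >= config.rankNormalizedConsidered))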


def _makeMakeIdFactory(datasetName, includeBand=True):
    """Construct a makeIdFactory instance method.

    These are identical for all the classes here, so this consolidates
    the code.

    datasetName: Dataset name without the coadd name prefix, e.g., "CoaddId" for "deepCoaddId"
    """

    def makeIdFactory(self, dataRef):
        """Return an IdFactory for setting the detection identifiers.

        The actual parameters used in the IdFactory are provided by
        the butler (through the provided data reference).
        """
        expId = getGen3CoaddExposureId(dataRef, coaddName=self.config.coaddName, includeBand=includeBand,
                                       log=self.log)
        info = ExposureIdInfo(expId, dataRef.get(self.config.coaddName + datasetName + "_bits"))
        return info.makeSourceIdFactory()
    return makeIdFactory
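
# Usage sketch: like _makeGetSchemaCatalogs, this factory is meant to be bound
# as a class attribute on a merge task.  The class below and the "MergedCoaddId"
# dataset name are hypothetical illustrations.
class _ExampleIdFactoryTaskSketch:
    # At run time the generated method looks up "<coaddName>MergedCoaddId_bits"
    # through the data reference and builds a source IdFactory from it.
    makeIdFactory = _makeMakeIdFactory("MergedCoaddId")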