Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

111

112

113

114

115

116

117

118

119

120

121

122

123

124

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

145

146

147

148

149

150

151

152

153

154

155

156

157

158

159

160

161

162

163

164

165

166

167

168

169

170

171

172

173

174

175

176

177

178

179

180

181

182

183

184

185

186

187

188

189

190

191

192

193

194

195

196

197

198

199

200

201

202

203

204

205

206

207

208

209

210

211

212

213

214

215

216

217

218

219

220

221

222

import lsst.afw.image as afwImage 

import lsst.afw.table as afwTable 

import lsst.pex.exceptions as pexExceptions 

 

from lsst.coadd.utils import ExistingCoaddDataIdContainer 

from lsst.pipe.base import TaskRunner, ArgumentParser 

from lsst.pex.config import Config, RangeField 

 

 

class MergeSourcesRunner(TaskRunner):
    """Task runner for `MergeDetectionTask` and `MergeMeasurementTask`

    Required because the run method requires a list of
    dataRefs rather than a single dataRef.
    """
    def makeTask(self, parsedCmd=None, args=None):
        """Provide a butler to the Task constructor.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        args: tuple
            Tuple of a list of data references and kwargs (un-used)

        Raises
        ------
        RuntimeError
            Thrown if both `parsedCmd` & `args` are `None`
        """
        if parsedCmd is not None:
            butler = parsedCmd.butler
        elif args is not None:
            # args comes from getTargetList: (list of dataRefs, kwargs dict)
            dataRefList, kwargs = args
            butler = dataRefList[0].getButler()
        else:
            raise RuntimeError("Neither parsedCmd nor args specified")
        return self.TaskClass(config=self.config, log=self.log, butler=butler)

    @staticmethod
    def buildRefDict(parsedCmd):
        """Build a hierarchical dictionary of patch references

        Parameters
        ----------
        parsedCmd:
            The parsed command

        Returns
        -------
        refDict: dict
            A reference dictionary of the form {tract: {patch: {filter: dataRef}}}

        Raises
        ------
        RuntimeError
            Thrown when multiple references are provided for the same
            combination of tract, patch and filter
        """
        refDict = {}  # Will index this as refDict[tract][patch][filter] = ref
        for ref in parsedCmd.id.refList:
            tract = ref.dataId["tract"]
            patch = ref.dataId["patch"]
            # Avoid `filter` as a name: it shadows the builtin
            filterName = ref.dataId["filter"]
            # setdefault replaces the explicit membership checks for the outer levels
            patchDict = refDict.setdefault(tract, {}).setdefault(patch, {})
            if filterName in patchDict:
                raise RuntimeError("Multiple versions of %s" % (ref.dataId,))
            patchDict[filterName] = ref
        return refDict

    @staticmethod
    def getTargetList(parsedCmd, **kwargs):
        """Provide a list of patch references for each patch, tract, filter combo.

        Parameters
        ----------
        parsedCmd:
            The parsed command
        kwargs:
            Keyword arguments passed to the task

        Returns
        -------
        targetList: list
            List of tuples, where each tuple is a (dataRef, kwargs) pair.
        """
        refDict = MergeSourcesRunner.buildRefDict(parsedCmd)
        # One target per (tract, patch); each target is the list of per-filter refs
        return [(list(p.values()), kwargs) for t in refDict.values() for p in t.values()]

 

 

def _makeGetSchemaCatalogs(datasetSuffix): 

"""Construct a getSchemaCatalogs instance method 

 

These are identical for most of the classes here, so we'll consolidate 

the code. 

 

datasetSuffix: Suffix of dataset name, e.g., "src" for "deepCoadd_src" 

""" 

 

def getSchemaCatalogs(self): 

"""Return a dict of empty catalogs for each catalog dataset produced by this task.""" 

src = afwTable.SourceCatalog(self.schema) 

if hasattr(self, "algMetadata"): 

src.getTable().setMetadata(self.algMetadata) 

return {self.config.coaddName + "Coadd_" + datasetSuffix: src} 

return getSchemaCatalogs 

 

 

def makeMergeArgumentParser(name, dataset):
    """!
    @brief Create a suitable ArgumentParser.

    We will use the ArgumentParser to get a provide a list of data
    references for patches; the RunnerClass will sort them into lists
    of data references for the same patch
    """
    datasetName = "deepCoadd_" + dataset
    argParser = ArgumentParser(name)
    argParser.add_id_argument("--id", datasetName,
                              ContainerClass=ExistingCoaddDataIdContainer,
                              help="data ID, e.g. --id tract=12345 patch=1,2 filter=g^r^i")
    return argParser

 

 

def getInputSchema(task, butler=None, schema=None):
    """!
    @brief Obtain the input schema either directly or from a butler reference.

    @param[in] task    the task; provides config.coaddName and inputDataset
                       used to name the schema dataset
    @param[in] butler  butler reference to obtain the input schema from
    @param[in] schema  the input schema
    """
    if schema is None:
        # Raise explicitly rather than assert: asserts vanish under `python -O`,
        # which would turn this into an obscure AttributeError on `butler.get`.
        if butler is None:
            raise RuntimeError("Neither butler nor schema specified")
        schema = butler.get(task.config.coaddName + "Coadd_" + task.inputDataset + "_schema",
                            immediate=True).schema
    return schema

 

 

def getShortFilterName(name):
    """Given a longer, camera-specific filter name (e.g. "HSC-I") return its shorthand name ("i").
    """
    # Delegate to the Filter registry; fall back to the input name when
    # no mapping is registered for it.
    try:
        shortName = afwImage.Filter(name).getFilterProperty().getName()
    except pexExceptions.NotFoundError:
        # No mapping could be found, try proceeding with given name
        return name
    return shortName

 

 

def readCatalog(task, patchRef):
    """!
    @brief Read input catalog.

    We read the input dataset provided by the 'inputDataset'
    class variable.

    @param[in] task     the task; provides config.coaddName, inputDataset and log
    @param[in] patchRef data reference for patch
    @return tuple consisting of the filter name and the catalog
    """
    filterName = patchRef.dataId["filter"]
    catalog = patchRef.get(task.config.coaddName + "Coadd_" + task.inputDataset, immediate=True)
    # Pass the arguments to the logger instead of pre-formatting with "%":
    # interpolation then only happens when INFO is actually enabled.
    task.log.info("Read %d sources for filter %s: %s", len(catalog), filterName, patchRef.dataId)
    return filterName, catalog

 

 

class CullPeaksConfig(Config):
    """!
    @anchor CullPeaksConfig_

    @brief Configuration for culling garbage peaks after merging footprints.

    Peaks may also be culled after detection or during deblending; this configuration object
    only deals with culling after merging Footprints.

    These cuts are based on three quantities:
    - nBands: the number of bands in which the peak was detected
    - peakRank: the position of the peak within its family, sorted from brightest to faintest.
    - peakRankNormalized: the peak rank divided by the total number of peaks in the family.

    The formula that identifies peaks to cull is:

      nBands < nBandsSufficient
        AND (rank >= rankSufficient)
        AND (rank >= rankConsider OR rank >= rankNormalizedConsider)

    To disable peak culling, simply set nBandsSufficient=1.
    """

    # Peaks seen in at least this many bands are never culled.
    nBandsSufficient = RangeField(dtype=int, default=2, min=1,
                                  doc="Always keep peaks detected in this many bands")
    # The brightest rankSufficient peaks of each family are always kept.
    rankSufficient = RangeField(dtype=int, default=20, min=1,
                                doc="Always keep this many peaks in each family")
    # Absolute-rank half of the "consider" condition (see class docstring formula).
    rankConsidered = RangeField(dtype=int, default=30, min=1,
                                doc=("Keep peaks with less than this rank that also match the "
                                     "rankNormalizedConsidered condition."))
    # Normalized-rank half of the "consider" condition (fraction of family size).
    rankNormalizedConsidered = RangeField(dtype=float, default=0.7, min=0.0,
                                          doc=("Keep peaks with less than this normalized rank that"
                                               " also match the rankConsidered condition."))

 

 

def _makeMakeIdFactory(datasetName): 

"""Construct a makeIdFactory instance method 

 

These are identical for all the classes here, so this consolidates 

the code. 

 

datasetName: Dataset name without the coadd name prefix, e.g., "CoaddId" for "deepCoaddId" 

""" 

 

def makeIdFactory(self, dataRef): 

"""Return an IdFactory for setting the detection identifiers 

 

The actual parameters used in the IdFactory are provided by 

the butler (through the provided data reference. 

""" 

expBits = dataRef.get(self.config.coaddName + datasetName + "_bits") 

expId = int(dataRef.get(self.config.coaddName + datasetName)) 

return afwTable.IdFactory.makeSource(expId, 64 - expBits) 

return makeIdFactory