Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1import numpy as np 

2from .baseMetric import BaseMetric 

3 

__all__ = ['TemplateExistsMetric',
           'UniformityMetric',
           'RapidRevisitUniformityMetric',
           'RapidRevisitMetric',
           'NRevisitsMetric',
           'IntraNightGapsMetric',
           'InterNightGapsMetric',
           'VisitGapMetric',
           'SeasonLengthMetric']

7 

8 

class fSMetric(BaseMetric):
    """Calculate the fS value (Nvisit-weighted delta(M5-M5srd)).
    """
    def __init__(self, filterCol='filter', metricName='fS', **kwargs):
        # Column holding the filter name for each visit.
        self.filterCol = filterCol
        cols = [self.filterCol]
        # NOTE(review): this passes `cols=` while the other metrics in this file
        # pass `col=` to BaseMetric -- confirm BaseMetric accepts `cols`.
        super().__init__(cols=cols, metricName=metricName, units='fS', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the fS (reserve above/below the m5 values from the LSST throughputs)

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The fS value.

        NOTE(review): the method body below contains only comments -- no
        computation is performed and the method implicitly returns None rather
        than the documented float. The implementation appears to be missing or
        truncated; confirm against the upstream source.
        """
        # We could import this from the m5_flat_sed values, but it makes sense to calculate the m5
        # directly from the throughputs. This is easy enough to do and will allow variation of
        # the throughput curves and readnoise and visit length, etc.

class TemplateExistsMetric(BaseMetric):
    """Calculate the fraction of images with a previous template image of desired quality."""

    def __init__(self, seeingCol='seeingFwhmGeom', observationStartMJDCol='observationStartMJD',
                 metricName='TemplateExistsMetric', **kwargs):
        cols = [seeingCol, observationStartMJDCol]
        super().__init__(col=cols, metricName=metricName,
                         units='fraction', **kwargs)
        self.seeingCol = seeingCol
        self.observationStartMJDCol = observationStartMJDCol

    def run(self, dataSlice, slicePoint=None):
        """Calculate the fraction of images with a previous template image of desired quality.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The fraction of images with a 'good' previous template image.
        """
        # Put the visits in time order (note: sorts the dataSlice in place).
        dataSlice.sort(order=self.observationStartMJDCol)
        # Running minimum of the seeing up to and including each visit.
        best_seeing_so_far = np.minimum.accumulate(dataSlice[self.seeingCol])
        # Compare each visit's seeing against the best seeing at the previous visit.
        seeing_diff = dataSlice[self.seeingCol] - np.roll(best_seeing_so_far, 1)
        # The first image never has a template; count the later visits whose seeing
        # is no better than some earlier visit (i.e. a template of adequate quality exists).
        n_good = np.count_nonzero(seeing_diff[1:] >= 0.)
        return n_good / float(dataSlice[self.seeingCol].size)

73 

74 

class UniformityMetric(BaseMetric):
    """Calculate how uniformly the observations are spaced in time.

    A value of 0 means the observations are perfectly uniform;
    1 means they are perfectly non-uniform.

    Parameters
    ----------
    surveyLength : float, optional
        The overall duration of the survey, in years. Default 10.
    """
    def __init__(self, mjdCol='observationStartMJD', units='',
                 surveyLength=10., **kwargs):
        self.mjdCol = mjdCol
        super().__init__(col=self.mjdCol, units=units, **kwargs)
        # Time span of the survey, in years.
        self.surveyLength = surveyLength

    def run(self, dataSlice, slicePoint=None):
        """Calculate the survey uniformity.

        This is based on how a KS-test works: look at the cumulative distribution of observation
        dates, and compare to a perfectly uniform cumulative distribution.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            Uniformity of the observation dates: 0 = perfectly uniform, 1 = perfectly non-uniform.
        """
        mjds = dataSlice[self.mjdCol]
        # A single observation is non-uniform by definition.
        if mjds.size == 1:
            return 1
        # Scale dates onto [0, 1]: 0 is the first observation, 1 is the end of surveyLength.
        scaled_dates = np.sort((mjds - mjds.min()) / (self.surveyLength * 365.25))
        # Empirical CDF of the observation dates.
        n_cum = np.arange(1, scaled_dates.size + 1) / float(scaled_dates.size)
        # NOTE(review): the extra '- scaled_dates[1]' offset differs from a textbook KS
        # statistic; preserved exactly as in the original -- confirm intent upstream.
        return np.max(np.abs(n_cum - scaled_dates - scaled_dates[1]))

121 

122 

class RapidRevisitUniformityMetric(BaseMetric):
    """Calculate uniformity of time between consecutive visits on short timescales (for RAV1).

    Parameters
    ----------
    mjdCol : str, optional
        The column containing the 'time' value. Default observationStartMJD.
    minNvisits : int, optional
        The minimum number of visits required within the time interval (dTmin to dTmax).
        Default 100.
    dTmin : float, optional
        The minimum dTime to consider (in days). Default 40 seconds.
    dTmax : float, optional
        The maximum dTime to consider (in days). Default 30 minutes.
    """
    def __init__(self, mjdCol='observationStartMJD', minNvisits=100,
                 dTmin=40.0 / 60.0 / 60.0 / 24.0, dTmax=30.0 / 60.0 / 24.0,
                 metricName='RapidRevisitUniformity', **kwargs):
        self.mjdCol = mjdCol
        self.minNvisits = minNvisits
        self.dTmin = dTmin
        self.dTmax = dTmax
        super().__init__(col=self.mjdCol, metricName=metricName, **kwargs)
        # 0 visits would crash the algorithm and 1 visit is nonuniform by
        # definition, so require at least 2.
        if self.minNvisits <= 1:
            self.minNvisits = 2

    def run(self, dataSlice, slicePoint=None):
        """Calculate the uniformity of visits within dTmin to dTmax.

        Uses the same 'uniformity' calculation as the UniformityMetric, based on the KS-test.
        A value of 0 is perfectly uniform; a value of 1 is purely non-uniform.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The uniformity measurement of the visits within time interval dTmin to dTmax.
        """
        # Intervals between consecutive visits.
        gaps = np.diff(np.sort(dataSlice[self.mjdCol]))
        # Keep only the intervals inside the [dTmin, dTmax] window.
        gaps = gaps[(gaps >= self.dTmin) & (gaps <= self.dTmax)]
        # Not enough qualifying visits in this time range: return the bad value.
        if gaps.size < self.minNvisits:
            return self.badval
        # Sort and scale onto 0-1 relative to the width of the allowed window.
        gaps = np.sort(gaps)
        gaps = (gaps - gaps.min()) / float(self.dTmax - self.dTmin)
        # CDF values of a perfectly uniform distribution on 0-1.
        uniform_cdf = np.arange(1, gaps.size + 1, 1) / float(gaps.size)
        # NOTE(review): the extra '- gaps[1]' offset differs from a textbook KS
        # statistic; preserved exactly as in the original -- confirm intent upstream.
        return np.max(np.abs(uniform_cdf - gaps - gaps[1]))

183 

184 

class RapidRevisitMetric(BaseMetric):
    """Pass/fail check on rapid revisits.

    Returns 1 when there are at least minN1 consecutive-visit intervals within
    (dTmin, dTpairs) and at least minN2 within (dTmin, dTmax); otherwise 0.
    """
    def __init__(self, mjdCol='observationStartMJD', metricName='RapidRevisit',
                 dTmin=40.0 / 60.0 / 60.0 / 24.0, dTpairs=20.0 / 60.0 / 24.0,
                 dTmax=30.0 / 60.0 / 24.0, minN1=28, minN2=82, **kwargs):
        self.mjdCol = mjdCol
        self.dTmin = dTmin
        self.dTpairs = dTpairs
        self.dTmax = dTmax
        self.minN1 = minN1
        self.minN2 = minN2
        super().__init__(col=self.mjdCol, metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # Intervals between consecutive visits, in days.
        gaps = np.diff(np.sort(dataSlice[self.mjdCol]))
        above_min = gaps >= self.dTmin
        # Count intervals in each window.
        N1 = int(np.count_nonzero(above_min & (gaps <= self.dTpairs)))
        N2 = int(np.count_nonzero(above_min & (gaps <= self.dTmax)))
        # 1 = requirement met, 0 = not met.
        return 1 if (N1 >= self.minN1 and N2 >= self.minN2) else 0

206 

207 

class NRevisitsMetric(BaseMetric):
    """Calculate the number of consecutive visits with time differences less than dT.

    Parameters
    ----------
    dT : float, optional
        The time interval to consider (in minutes). Default 30.
    normed : bool, optional
        Flag to indicate whether to return the total number of consecutive visits with time
        differences less than dT (False), or the fraction of overall visits (True).
        Note that we would expect (if all visits occur in pairs within dT) this fraction would be 0.5!
    """
    def __init__(self, mjdCol='observationStartMJD', dT=30.0, normed=False, metricName=None, **kwargs):
        units = ''
        # Build a default metric name from dT; raw counts get '#' units.
        if metricName is None:
            if normed:
                metricName = 'Fraction of revisits faster than %.1f minutes' % (dT)
            else:
                metricName = 'Number of revisits faster than %.1f minutes' % (dT)
                units = '#'
        self.mjdCol = mjdCol
        # Convert dT from minutes to days, to match the MJD column.
        self.dT = dT / 60. / 24.
        self.normed = normed
        super().__init__(col=self.mjdCol, units=units, metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Count the number of consecutive visits occurring within time intervals dT.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            Either the total number of consecutive visits within dT or the fraction compared to overall visits.
        """
        gaps = np.diff(np.sort(dataSlice[self.mjdCol]))
        n_fast = np.count_nonzero(gaps <= self.dT)
        if self.normed:
            return n_fast / float(np.size(dataSlice[self.mjdCol]))
        return n_fast

253 

254 

class IntraNightGapsMetric(BaseMetric):
    """Calculate the gap between consecutive observations within a night, in hours.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        Default np.median.
    """

    def __init__(self, mjdCol='observationStartMJD', nightCol='night', reduceFunc=np.median,
                 metricName='Median Intra-Night Gap', **kwargs):
        self.mjdCol = mjdCol
        self.nightCol = nightCol
        self.reduceFunc = reduceFunc
        super().__init__(col=[self.mjdCol, self.nightCol],
                         units='hours', metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the (reduceFunc) of the gap between consecutive observations within a night.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) value of the gap, in hours.
        """
        # Put the visits in time order (note: sorts the dataSlice in place).
        dataSlice.sort(order=self.mjdCol)
        gaps = np.diff(dataSlice[self.mjdCol])
        # Keep only gaps between consecutive visits taken on the same night.
        same_night = np.diff(dataSlice[self.nightCol]) == 0
        if not np.any(same_night):
            return self.badval
        # Convert days to hours.
        return self.reduceFunc(gaps[same_night]) * 24

300 

301 

class InterNightGapsMetric(BaseMetric):
    """Calculate the gap between consecutive observations in different nights, in days.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        Default np.median.
    """
    def __init__(self, mjdCol='observationStartMJD', nightCol='night', reduceFunc=np.median,
                 metricName='Median Inter-Night Gap', **kwargs):
        self.mjdCol = mjdCol
        self.nightCol = nightCol
        self.reduceFunc = reduceFunc
        super().__init__(col=[self.mjdCol, self.nightCol],
                         units='days', metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the (reduceFunc) of the gap between consecutive nights of observations.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) of the gap between consecutive nights of observations, in days.
        """
        # Put the visits in time order (note: sorts the dataSlice in place).
        dataSlice.sort(order=self.mjdCol)
        nights = dataSlice[self.nightCol]
        unique_nights = np.unique(nights)
        if np.size(unique_nights) < 2:
            return self.badval
        # Index of the first and last observation of each night.
        # (searchsorted assumes nights is sorted -- presumably night increases
        # monotonically with MJD after the sort above; confirm against the schema.)
        first_of_night = np.searchsorted(nights, unique_nights)
        last_of_night = np.searchsorted(nights, unique_nights, side='right') - 1
        # Gap = first obs of each night minus last obs of the previous night.
        gaps = dataSlice[self.mjdCol][first_of_night[1:]] - dataSlice[self.mjdCol][last_of_night[:-1]]
        return self.reduceFunc(gaps)

346 

347 

class VisitGapMetric(BaseMetric):
    """Calculate the gap between any consecutive observations, in hours, regardless of night boundaries.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        Default np.median.
    """
    def __init__(self, mjdCol='observationStartMJD', nightCol='night', reduceFunc=np.median,
                 metricName='VisitGap', **kwargs):
        self.mjdCol = mjdCol
        self.nightCol = nightCol
        self.reduceFunc = reduceFunc
        super().__init__(col=[self.mjdCol, self.nightCol],
                         units='hours', metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the (reduceFunc) of the gap between consecutive observations.

        Different from inter-night and intra-night gaps: this simply considers all of the
        times between consecutive observations, ignoring night boundaries entirely.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) of the time between consecutive observations, in hours.
        """
        # Put the visits in time order (note: sorts the dataSlice in place).
        dataSlice.sort(order=self.mjdCol)
        gaps = np.diff(dataSlice[self.mjdCol])
        # Convert days to hours.
        return self.reduceFunc(gaps) * 24.

389 

class SeasonLengthMetric(BaseMetric):
    """Calculate the length of LSST seasons, in days.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        This reduces the season length in each season from 10 separate values to a single value.
        Default np.median.
    """
    def __init__(self, mjdCol='observationStartMJD', seasonCol='season', reduceFunc=np.median,
                 metricName='SeasonLength', **kwargs):
        self.mjdCol = mjdCol
        self.seasonCol = seasonCol
        self.reduceFunc = reduceFunc
        super().__init__(col=[self.mjdCol, self.seasonCol],
                         units='days', metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the (reduceFunc) of the length of each season.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) of the length of each season, in days.
        """
        # Sort by season so searchsorted below is valid (note: sorts the dataSlice in place).
        dataSlice.sort(order=self.seasonCol)
        seasons = np.unique(dataSlice[self.seasonCol])
        # Index of the first and last observation of each season.
        firstOfSeason = np.searchsorted(dataSlice[self.seasonCol], seasons)
        lastOfSeason = np.searchsorted(dataSlice[self.seasonCol], seasons, side='right') - 1
        # NOTE(review): seasons may not match up around the 0/360 RA boundary; if that
        # ever yields out-of-range indices, index clamping may need to be reintroduced.
        # (Removed: an unused `lenData` local and the commented-out clamping code that
        # was its only consumer.)
        length = dataSlice[self.mjdCol][lastOfSeason] - dataSlice[self.mjdCol][firstOfSeason]
        return self.reduceFunc(length)