from builtins import zip
import numpy as np
from .baseMetric import BaseMetric

__all__ = ['NChangesMetric',
           'MinTimeBetweenStatesMetric', 'NStateChangesFasterThanMetric',
           'MaxStateChangesWithinMetric',
           'TeffMetric', 'OpenShutterFractionMetric',
           'CompletenessMetric', 'FilterColorsMetric', 'BruteOSFMetric']


class NChangesMetric(BaseMetric):
    """
    Compute the number of times a column value changes
    (useful for filter changes in particular).
    """
    def __init__(self, col='filter', orderBy='observationStartMJD', **kwargs):
        self.col = col
        self.orderBy = orderBy
        super(NChangesMetric, self).__init__(col=[col, orderBy], units='#', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # Sort on the orderBy column (time), so changes are counted in visit order.
        idxs = np.argsort(dataSlice[self.orderBy])
        diff = (dataSlice[self.col][idxs][1:] != dataSlice[self.col][idxs][:-1])
        return np.size(np.where(diff)[0])
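
# A minimal usage sketch (hypothetical helper, not part of the original module):
# a small structured array stands in for an opsim dataSlice, with column names
# matching this module's defaults.
def _example_nchanges():
    data = np.array([(59580.00, 'g'), (59580.01, 'r'), (59580.02, 'r')],
                    dtype=[('observationStartMJD', float), ('filter', 'U1')])
    metric = NChangesMetric()
    return metric.run(data)  # expected: 1 (the single g -> r change)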

class MinTimeBetweenStatesMetric(BaseMetric):
    """
    Compute the minimum time between changes of state in a column value
    (useful for calculating the fastest time between filter changes in particular).
    Returns delta time in minutes!
    """
    def __init__(self, changeCol='filter', timeCol='observationStartMJD', metricName=None, **kwargs):
        """
        changeCol = column that changes state
        timeCol = column tracking the time of each visit
        """
        self.changeCol = changeCol
        self.timeCol = timeCol
        if metricName is None:
            metricName = 'Minimum time between %s changes (minutes)' % (changeCol)
        super(MinTimeBetweenStatesMetric, self).__init__(col=[changeCol, timeCol], metricName=metricName,
                                                         units='', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # Sort on time, to be sure we've got filter (or other col) changes in the right order.
        idxs = np.argsort(dataSlice[self.timeCol])
        changes = (dataSlice[self.changeCol][idxs][1:] != dataSlice[self.changeCol][idxs][:-1])
        condition = np.where(changes)[0]
        changetimes = dataSlice[self.timeCol][idxs][1:][condition]
        # Time of the previous change, using the first visit as the
        # 'previous change' for the first detected change.
        prevchangetime = np.concatenate((np.array([dataSlice[self.timeCol][idxs][0]]),
                                         dataSlice[self.timeCol][idxs][1:][condition][:-1]))
        dtimes = changetimes - prevchangetime
        dtimes *= 24 * 60  # Convert days to minutes.
        if dtimes.size == 0:
            return self.badval
        return dtimes.min()
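
# A minimal usage sketch (hypothetical helper, not part of the original module):
# two filter changes, 14.4 and 28.8 minutes apart; the metric reports the faster.
def _example_min_time_between_states():
    data = np.array([(59580.00, 'g'), (59580.01, 'r'), (59580.03, 'g')],
                    dtype=[('observationStartMJD', float), ('filter', 'U1')])
    metric = MinTimeBetweenStatesMetric()
    return metric.run(data)  # expected: 14.4 (0.01 days, in minutes)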

class NStateChangesFasterThanMetric(BaseMetric):
    """
    Compute the number of changes of state that happen faster than 'cutoff'
    (useful for calculating time between filter changes in particular).
    'cutoff' should be in minutes.
    """
    def __init__(self, changeCol='filter', timeCol='observationStartMJD', metricName=None, cutoff=20,
                 **kwargs):
        """
        changeCol = column tracking the state that changes
        timeCol = column keeping the time of each visit
        cutoff = the cutoff time between state changes, in minutes
        """
        if metricName is None:
            metricName = 'Number of %s changes faster than <%.1f minutes' % (changeCol, cutoff)
        self.changeCol = changeCol
        self.timeCol = timeCol
        self.cutoff = cutoff / 24.0 / 60.0  # Convert cutoff from minutes to days.
        super(NStateChangesFasterThanMetric, self).__init__(col=[changeCol, timeCol],
                                                            metricName=metricName, units='#', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # Sort on time, to be sure we've got filter (or other col) changes in the right order.
        idxs = np.argsort(dataSlice[self.timeCol])
        changes = (dataSlice[self.changeCol][idxs][1:] != dataSlice[self.changeCol][idxs][:-1])
        condition = np.where(changes)[0]
        changetimes = dataSlice[self.timeCol][idxs][1:][condition]
        prevchangetime = np.concatenate((np.array([dataSlice[self.timeCol][idxs][0]]),
                                         dataSlice[self.timeCol][idxs][1:][condition][:-1]))
        dtimes = changetimes - prevchangetime
        return np.where(dtimes < self.cutoff)[0].size
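
# A minimal usage sketch (hypothetical helper, not part of the original module):
# of the two changes below (14.4 and 28.8 minutes apart), only one beats the
# default 20-minute cutoff.
def _example_n_state_changes_faster_than():
    data = np.array([(59580.00, 'g'), (59580.01, 'r'), (59580.03, 'g')],
                    dtype=[('observationStartMJD', float), ('filter', 'U1')])
    metric = NStateChangesFasterThanMetric(cutoff=20)
    return metric.run(data)  # expected: 1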

class MaxStateChangesWithinMetric(BaseMetric):
    """
    Compute the maximum number of changes of state that occur within a given timespan
    (useful for calculating time between filter changes in particular).
    'timespan' should be in minutes.
    """
    def __init__(self, changeCol='filter', timeCol='observationStartMJD', metricName=None, timespan=20,
                 **kwargs):
        """
        changeCol = column tracking the state that changes
        timeCol = column keeping the time of each visit
        timespan = the timespan to count the number of changes within (in minutes)
        """
        if metricName is None:
            metricName = 'Max number of %s changes within %.1f minutes' % (changeCol, timespan)
        self.changeCol = changeCol
        self.timeCol = timeCol
        self.timespan = timespan / 24. / 60.  # Convert timespan from minutes to days.
        super(MaxStateChangesWithinMetric, self).__init__(col=[changeCol, timeCol],
                                                          metricName=metricName, units='#', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # This operates slightly differently from the metrics above; those calculate only successive times
        # between changes, but here we must calculate the actual times of each change.
        # Check if there was only one observation (and return 0 if so).
        if dataSlice[self.changeCol].size == 1:
            return 0
        # Sort on time, to be sure we've got filter (or other col) changes in the right order.
        idxs = np.argsort(dataSlice[self.timeCol])
        changes = (dataSlice[self.changeCol][idxs][:-1] != dataSlice[self.changeCol][idxs][1:])
        condition = np.where(changes)[0]
        changetimes = dataSlice[self.timeCol][idxs][1:][condition]
        # If there were no changes at all, the maximum is zero.
        if changetimes.size == 0:
            return 0
        # For each change, count how many changes (including itself) occur within
        # 'timespan' afterwards; report the largest such count.
        ct_plus = changetimes + self.timespan
        indx2 = np.searchsorted(changetimes, ct_plus, side='right')
        indx1 = np.arange(changetimes.size)
        nchanges = indx2 - indx1
        return nchanges.max()
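
# A minimal usage sketch (hypothetical helper, not part of the original module):
# three filter changes packed into ~14 minutes all fall within the default
# 20-minute window.
def _example_max_state_changes_within():
    data = np.array([(59580.000, 'g'), (59580.005, 'r'),
                     (59580.010, 'g'), (59580.015, 'r')],
                    dtype=[('observationStartMJD', float), ('filter', 'U1')])
    metric = MaxStateChangesWithinMetric(timespan=20)
    return metric.run(data)  # expected: 3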

class TeffMetric(BaseMetric):
    """
    Effective time equivalent for a given set of visits.
    """
    def __init__(self, m5Col='fiveSigmaDepth', filterCol='filter', metricName='tEff',
                 fiducialDepth=None, teffBase=30.0, normed=False, **kwargs):
        self.m5Col = m5Col
        self.filterCol = filterCol
        if fiducialDepth is None:
            self.depth = {'u': 23.9, 'g': 25.0, 'r': 24.7, 'i': 24.0,
                          'z': 23.3, 'y': 22.1}  # design values
        else:
            if isinstance(fiducialDepth, dict):
                self.depth = fiducialDepth
            else:
                raise ValueError('fiducialDepth should be None or a dictionary')
        self.teffBase = teffBase
        self.normed = normed
        if self.normed:
            units = ''
        else:
            units = 'seconds'
        super(TeffMetric, self).__init__(col=[m5Col, filterCol], metricName=metricName,
                                         units=units, **kwargs)
        if self.normed:
            self.comment = 'Normalized effective time'
        else:
            self.comment = 'Effective time'
        self.comment += ' of a series of observations, evaluating the equivalent amount of time'
        self.comment += ' each observation would require if taken at a fiducial limiting magnitude.'
        self.comment += ' Fiducial depths are: %s' % self.depth
        if self.normed:
            self.comment += ' Normalized by the total amount of time actually spent on-sky.'

    def run(self, dataSlice, slicePoint=None):
        filters = np.unique(dataSlice[self.filterCol])
        teff = 0.0
        for f in filters:
            match = np.where(dataSlice[self.filterCol] == f)[0]
            teff += (10.0**(0.8 * (dataSlice[self.m5Col][match] - self.depth[f]))).sum()
        teff *= self.teffBase
        if self.normed:
            # Normalize by the t_eff if each observation had been taken at the fiducial depth.
            teff = teff / (self.teffBase * dataSlice[self.m5Col].size)
        return teff
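
# A minimal usage sketch (hypothetical helper, not part of the original module):
# per the formula above, each visit contributes teffBase * 10**(0.8 * (m5 - fiducial)).
# A visit exactly at the r-band fiducial depth (24.7) contributes 30 s; one
# 0.25 mag deeper contributes 30 * 10**0.2 ~ 47.5 s.
def _example_teff():
    data = np.array([(24.7, 'r'), (24.95, 'r')],
                    dtype=[('fiveSigmaDepth', float), ('filter', 'U1')])
    metric = TeffMetric()
    return metric.run(data)  # expected: ~77.5 seconds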

class OpenShutterFractionMetric(BaseMetric):
    """
    Compute the fraction of time the shutter is open compared to the total time spent observing.
    """
    def __init__(self, metricName='OpenShutterFraction',
                 slewTimeCol='slewTime', expTimeCol='visitExposureTime', visitTimeCol='visitTime',
                 **kwargs):
        self.expTimeCol = expTimeCol
        self.visitTimeCol = visitTimeCol
        self.slewTimeCol = slewTimeCol
        super(OpenShutterFractionMetric, self).__init__(col=[self.expTimeCol, self.visitTimeCol,
                                                             self.slewTimeCol],
                                                        metricName=metricName, units='OpenShutter/TotalTime',
                                                        **kwargs)
        self.comment = 'Total open shutter time (%s) divided by the total time spent observing ' \
                       '(%s + %s).' % (self.expTimeCol, self.visitTimeCol, self.slewTimeCol)

    def run(self, dataSlice, slicePoint=None):
        result = (np.sum(dataSlice[self.expTimeCol]) /
                  np.sum(dataSlice[self.slewTimeCol] + dataSlice[self.visitTimeCol]))
        return result
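
# A minimal usage sketch (hypothetical helper, not part of the original module):
# two visits with 30 s of exposure per 34 s visit plus 5 s of slew each give an
# open-shutter fraction of 60 / 78 ~ 0.77.
def _example_open_shutter_fraction():
    data = np.array([(30.0, 34.0, 5.0), (30.0, 34.0, 5.0)],
                    dtype=[('visitExposureTime', float), ('visitTime', float),
                           ('slewTime', float)])
    metric = OpenShutterFractionMetric()
    return metric.run(data)  # expected: 60 / 78 ~ 0.769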

class CompletenessMetric(BaseMetric):
    """Compute the completeness and joint completeness of a set of visits."""
    def __init__(self, filterColName='filter', metricName='Completeness',
                 u=0, g=0, r=0, i=0, z=0, y=0, **kwargs):
        """
        Compute the completeness for each of the given filters and the
        joint completeness across all filters.

        Completeness is calculated in any filter with a requested 'nvisits' value
        greater than 0; the expected range is 0-1.
        """
        self.filterCol = filterColName
        super(CompletenessMetric, self).__init__(col=self.filterCol, metricName=metricName, **kwargs)
        self.nvisitsRequested = np.array([u, g, r, i, z, y])
        self.filters = np.array(['u', 'g', 'r', 'i', 'z', 'y'])
        # Remove filters from consideration where the number of visits requested is zero.
        good = np.where(self.nvisitsRequested > 0)
        self.nvisitsRequested = self.nvisitsRequested[good]
        self.filters = self.filters[good]
        # Raise an exception if the number of visits wasn't changed from the default for at least one filter.
        if len(self.filters) == 0:
            raise ValueError('Please set the requested number of visits for at least one filter.')
        # Set the reduce order, for display purposes.
        for i, f in enumerate(['u', 'g', 'r', 'i', 'z', 'y', 'Joint']):
            self.reduceOrder[f] = i
        self.comment = 'Completeness fraction for each filter (and joint across all filters), calculated'
        self.comment += ' as the number of visits compared to a benchmark value of:'
        for i, f in enumerate(self.filters):
            self.comment += ' %s: %d' % (f, self.nvisitsRequested[i])
        self.comment += '.'

    def run(self, dataSlice, slicePoint=None):
        """
        Compute the completeness for each filter, and then the minimum (joint) completeness for each slice.
        """
        allCompleteness = []
        for f, nVis in zip(self.filters, self.nvisitsRequested):
            filterVisits = np.size(np.where(dataSlice[self.filterCol] == f)[0])
            allCompleteness.append(filterVisits / float(nVis))
        allCompleteness.append(np.min(np.array(allCompleteness)))
        return np.array(allCompleteness)

    # Reduce methods, one per filter plus the joint completeness; MAF discovers
    # these by their 'reduce' name prefix. Filters with no requested visits
    # report a completeness of 1 (complete by default).
    def reduceu(self, completeness):
        if 'u' in self.filters:
            return completeness[np.where(self.filters == 'u')[0]]
        else:
            return 1

    def reduceg(self, completeness):
        if 'g' in self.filters:
            return completeness[np.where(self.filters == 'g')[0]]
        else:
            return 1

    def reducer(self, completeness):
        if 'r' in self.filters:
            return completeness[np.where(self.filters == 'r')[0]]
        else:
            return 1

    def reducei(self, completeness):
        if 'i' in self.filters:
            return completeness[np.where(self.filters == 'i')[0]]
        else:
            return 1

    def reducez(self, completeness):
        if 'z' in self.filters:
            return completeness[np.where(self.filters == 'z')[0]]
        else:
            return 1

    def reducey(self, completeness):
        if 'y' in self.filters:
            return completeness[np.where(self.filters == 'y')[0]]
        else:
            return 1

    def reduceJoint(self, completeness):
        """
        The joint completeness is just the minimum completeness for a point/field.
        """
        return completeness[-1]
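
# A minimal usage sketch (hypothetical helper, not part of the original module):
# requesting 2 g-band and 2 r-band visits but delivering only one g visit gives
# per-filter completeness [g=0.5, r=1.0] and a joint completeness of 0.5.
def _example_completeness():
    data = np.array([('g',), ('r',), ('r',)], dtype=[('filter', 'U1')])
    metric = CompletenessMetric(g=2, r=2)
    return metric.run(data)  # expected: array([0.5, 1.0, 0.5])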

class FilterColorsMetric(BaseMetric):
    """
    Calculate an RGBA value that accounts for the filters used up to time t0.
    """
    def __init__(self, rRGB='rRGB', gRGB='gRGB', bRGB='bRGB',
                 timeCol='observationStartMJD', t0=None, tStep=40./60./60./24.,
                 metricName='FilterColors', **kwargs):
        """
        t0 = the current time (MJD)
        """
        self.rRGB = rRGB
        self.bRGB = bRGB
        self.gRGB = gRGB
        self.timeCol = timeCol
        self.t0 = t0
        if self.t0 is None:
            self.t0 = 59580
        self.tStep = tStep  # Default is 40 seconds, expressed in days.
        super(FilterColorsMetric, self).__init__(col=[rRGB, gRGB, bRGB, timeCol],
                                                 metricName=metricName, **kwargs)
        self.metricDtype = 'object'
        self.comment = 'Metric specifically to generate colors for the opsim movie.'

    def _scaleColor(self, colorR, colorG, colorB):
        # Sum each channel, then scale so the brightest channel is 1.
        r = colorR.sum()
        g = colorG.sum()
        b = colorB.sum()
        scale = 1. / np.max([r, g, b])
        r *= scale
        g *= scale
        b *= scale
        return r, g, b

    def run(self, dataSlice, slicePoint=None):
        deltaT = np.abs(dataSlice[self.timeCol] - self.t0)
        visitNow = np.where(deltaT <= self.tStep)[0]
        if len(visitNow) > 0:
            # We have exact matches to this timestep, so use their colors directly and set alpha to >1.
            r, g, b = self._scaleColor(dataSlice[visitNow][self.rRGB],
                                       dataSlice[visitNow][self.gRGB],
                                       dataSlice[visitNow][self.bRGB])
            alpha = 10.
        else:
            # This part of the sky has only older exposures.
            deltaTmin = deltaT.min()
            nObs = len(dataSlice[self.timeCol])
            # Generate a combined color (weighted towards the most recent observation).
            decay = deltaTmin / deltaT
            r, g, b = self._scaleColor(dataSlice[self.rRGB] * decay,
                                       dataSlice[self.gRGB] * decay,
                                       dataSlice[self.bRGB] * decay)
            # Then generate an alpha value, between alphamax/alphamid for visits
            # happening within the previous 12 hours, then falling between
            # alphamid/alphamin with a value that depends on the number of obs.
            alphamax = 0.8
            alphamid = 0.5
            alphamin = 0.2
            if deltaTmin < 0.5:
                alpha = np.exp(-deltaTmin * 10.) * (alphamax - alphamid) + alphamid
            else:
                alpha = nObs / 800. * alphamid
            alpha = np.max([alpha, alphamin])
            alpha = np.min([alphamax, alpha])
        return (r, g, b, alpha)
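
# A minimal usage sketch (hypothetical helper, not part of the original module):
# one visit matches the current timestep t0 exactly, so its color is used
# directly and alpha is set above 1 as the 'current visit' flag.
def _example_filter_colors():
    data = np.array([(0.2, 0.1, 0.7, 59580.0), (0.9, 0.3, 0.1, 59579.5)],
                    dtype=[('rRGB', float), ('gRGB', float), ('bRGB', float),
                           ('observationStartMJD', float)])
    metric = FilterColorsMetric(t0=59580.0)
    return metric.run(data)  # expected: (~0.29, ~0.14, 1.0, 10.0)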

class BruteOSFMetric(BaseMetric):
    """Assume I can't trust the slewtime or visittime columns.
    This computes the fraction of time the shutter is open, with no penalty for the first exposure
    after a long gap (e.g., 1st exposure of the night). Presumably, the telescope will need to focus,
    so there's not much a scheduler could do to optimize keeping the shutter open after a closure.
    """
    def __init__(self, metricName='BruteOSFMetric',
                 expTimeCol='visitExposureTime', mjdCol='observationStartMJD', maxgap=10.,
                 fudge=0., **kwargs):
        """
        Parameters
        ----------
        maxgap : float (10.)
            The maximum gap between observations, in minutes. Assume the dome has
            closed during any longer gap.
        fudge : float (0.)
            Fudge factor if a constant has to be added to the exposure time values (like in OpSim 3.61).
        expTimeCol : str ('visitExposureTime')
            The name of the exposure time column. Assumed to be in seconds.
        mjdCol : str ('observationStartMJD')
            The name of the column with the start times of the exposures. Assumed to be in units of days.
        """
        self.expTimeCol = expTimeCol
        self.maxgap = maxgap / 60. / 24.  # Convert from minutes to days.
        self.mjdCol = mjdCol
        self.fudge = fudge
        super(BruteOSFMetric, self).__init__(col=[self.expTimeCol, mjdCol],
                                             metricName=metricName, units='OpenShutter/TotalTime',
                                             **kwargs)

    def run(self, dataSlice, slicePoint=None):
        times = np.sort(dataSlice[self.mjdCol])
        diff = np.diff(times)
        # Only count gaps short enough that the dome was plausibly still open.
        good = np.where(diff < self.maxgap)
        openTime = np.sum(diff[good]) * 24. * 3600.  # Convert days to seconds.
        result = np.sum(dataSlice[self.expTimeCol] + self.fudge) / float(openTime)
        return result
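
# A minimal usage sketch (hypothetical helper, not part of the original module):
# three 30 s exposures separated by ~4.3-minute gaps. Both gaps fall under the
# default 10-minute maxgap, so the denominator is the full ~518 s span between
# the first and last visit.
def _example_brute_osf():
    data = np.array([(30.0, 59580.000), (30.0, 59580.003), (30.0, 59580.006)],
                    dtype=[('visitExposureTime', float), ('observationStartMJD', float)])
    metric = BruteOSFMetric()
    return metric.run(data)  # expected: 90 / 518.4 ~ 0.174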