Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

111

112

113

114

115

116

117

118

119

120

121

122

123

124

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

145

146

147

148

149

150

151

152

153

154

155

156

157

158

159

160

161

162

163

164

165

166

167

168

169

170

171

172

173

174

175

176

177

178

179

180

181

182

183

184

185

186

187

188

189

190

191

192

193

194

195

196

197

198

199

200

201

202

203

204

205

206

207

208

209

210

211

212

213

214

215

216

217

218

219

220

221

222

223

224

225

226

227

228

229

230

231

232

233

234

235

236

237

238

239

240

241

242

243

244

245

246

247

248

249

250

251

252

253

254

255

256

257

258

259

260

261

262

263

264

265

266

267

268

269

270

271

272

273

274

275

276

277

278

279

280

281

282

283

284

285

286

287

288

289

290

291

292

293

294

295

296

297

298

299

300

301

302

303

304

305

306

307

308

309

310

311

312

313

314

315

316

317

318

319

320

321

322

323

324

325

326

327

328

329

330

331

332

333

334

335

336

337

338

339

340

341

342

343

344

345

346

347

348

349

350

351

352

353

354

355

356

357

358

359

360

361

362

363

364

365

366

367

368

369

370

371

372

373

374

375

376

377

378

379

380

381

382

383

384

385

386

387

388

389

390

391

392

from builtins import zip 

import numpy as np 

from .baseMetric import BaseMetric 

 

# Public API of this module: one metric class per exported name.
__all__ = [
    'NChangesMetric',
    'MinTimeBetweenStatesMetric',
    'NStateChangesFasterThanMetric',
    'MaxStateChangesWithinMetric',
    'TeffMetric',
    'OpenShutterFractionMetric',
    'CompletenessMetric',
    'FilterColorsMetric',
    'BruteOSFMetric',
]

 

 

class NChangesMetric(BaseMetric):
    """
    Compute the number of times a column value changes.
    (useful for filter changes in particular).
    """
    def __init__(self, col='filter', orderBy='observationStartMJD', **kwargs):
        """
        col = column to watch for changes of value
        orderBy = column used to sort visits before comparing successive values
        """
        self.col = col
        self.orderBy = orderBy
        super(NChangesMetric, self).__init__(col=[col, orderBy], units='#', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # Sort by the orderBy column so successive values are compared in visit order.
        idxs = np.argsort(dataSlice[self.orderBy])
        diff = (dataSlice[self.col][idxs][1:] != dataSlice[self.col][idxs][:-1])
        # Count the True entries directly; replaces np.size(np.where(diff == True)[0]).
        return int(np.count_nonzero(diff))

 

 

class MinTimeBetweenStatesMetric(BaseMetric):
    """
    Compute the minimum time between changes of state in a column value.
    (useful for calculating fastest time between filter changes in particular).
    Returns delta time in minutes!
    """
    def __init__(self, changeCol='filter', timeCol='observationStartMJD', metricName=None, **kwargs):
        """
        changeCol = column that changes state
        timeCol = column tracking time of each visit
        """
        self.changeCol = changeCol
        self.timeCol = timeCol
        if metricName is None:
            metricName = 'Minimum time between %s changes (minutes)' % (changeCol)
        super(MinTimeBetweenStatesMetric, self).__init__(col=[changeCol, timeCol], metricName=metricName,
                                                         units='', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # Sort on time, to be sure we've got filter (or other col) changes in the right order.
        idxs = np.argsort(dataSlice[self.timeCol])
        times = dataSlice[self.timeCol][idxs]
        states = dataSlice[self.changeCol][idxs]
        # Indices (into times[1:]) of the visits where the state differs from the previous visit.
        condition = np.where(states[1:] != states[:-1])[0]
        changetimes = times[1:][condition]
        # Time of the previous change; the first change is measured from the first visit.
        prevchangetime = np.concatenate((np.array([times[0]]), changetimes[:-1]))
        # Convert from days (MJD) to minutes.
        dtimes = (changetimes - prevchangetime) * 24 * 60
        if dtimes.size == 0:
            # No state changes at all in this slice.
            return self.badval
        return dtimes.min()

 

 

class NStateChangesFasterThanMetric(BaseMetric):
    """
    Compute the number of changes of state that happen faster than 'cutoff'.
    (useful for calculating time between filter changes in particular).
    'cutoff' should be in minutes.
    """
    def __init__(self, changeCol='filter', timeCol='observationStartMJD', metricName=None, cutoff=20,
                 **kwargs):
        """
        changeCol = column tracking changes in state
        timeCol = column keeping the time of each visit
        cutoff = the cutoff value (in minutes) for the reduce method 'NBelow'
        """
        if metricName is None:
            metricName = 'Number of %s changes faster than <%.1f minutes' % (changeCol, cutoff)
        self.changeCol = changeCol
        self.timeCol = timeCol
        self.cutoff = cutoff/24.0/60.0  # Convert cutoff from minutes to days.
        super(NStateChangesFasterThanMetric, self).__init__(col=[changeCol, timeCol],
                                                            metricName=metricName, units='#', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # Sort on time, to be sure we've got filter (or other col) changes in the right order.
        idxs = np.argsort(dataSlice[self.timeCol])
        times = dataSlice[self.timeCol][idxs]
        states = dataSlice[self.changeCol][idxs]
        condition = np.where(states[1:] != states[:-1])[0]
        changetimes = times[1:][condition]
        # Time of the previous change; the first change is measured from the first visit.
        prevchangetime = np.concatenate((np.array([times[0]]), changetimes[:-1]))
        # dtimes remains in days here; self.cutoff was converted to days in __init__.
        dtimes = changetimes - prevchangetime
        return np.where(dtimes < self.cutoff)[0].size

 

 

class MaxStateChangesWithinMetric(BaseMetric):
    """
    Compute the maximum number of changes of state that occur within a given timespan.
    (useful for calculating time between filter changes in particular).
    'timespan' should be in minutes.
    """
    def __init__(self, changeCol='filter', timeCol='observationStartMJD', metricName=None, timespan=20,
                 **kwargs):
        """
        changeCol = column tracking changes in state
        timeCol = column keeping the time of each visit
        timespan = the timespan to count the number of changes within (in minutes)
        """
        if metricName is None:
            metricName = 'Max number of %s changes within %.1f minutes' % (changeCol, timespan)
        self.changeCol = changeCol
        self.timeCol = timeCol
        self.timespan = timespan/24./60.  # Convert timespan from minutes to days.
        super(MaxStateChangesWithinMetric, self).__init__(col=[changeCol, timeCol],
                                                          metricName=metricName, units='#', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        # This operates slightly differently from the metrics above; those calculate only successive
        # times between changes, but here we must calculate the actual times of each change.
        # Check if there was only one observation (and return 0 if so).
        if dataSlice[self.changeCol].size == 1:
            return 0
        # Sort on time, to be sure we've got filter (or other col) changes in the right order.
        idxs = np.argsort(dataSlice[self.timeCol])
        changes = (dataSlice[self.changeCol][idxs][:-1] != dataSlice[self.changeCol][idxs][1:])
        condition = np.where(changes)[0]
        changetimes = dataSlice[self.timeCol][idxs][1:][condition]
        # If there are 0 filter changes ...
        if changetimes.size == 0:
            return 0
        # Otherwise: for each change, count how many changes fall within 'timespan' after it.
        # changetimes is sorted, so searchsorted gives the index one past the last change in range.
        ct_plus = changetimes + self.timespan
        indx2 = np.searchsorted(changetimes, ct_plus, side='right')
        indx1 = np.arange(changetimes.size)
        nchanges = indx2 - indx1
        return nchanges.max()

 

 

class TeffMetric(BaseMetric):
    """
    Effective time equivalent for a given set of visits.
    """
    def __init__(self, m5Col='fiveSigmaDepth', filterCol='filter', metricName='tEff',
                 fiducialDepth=None, teffBase=30.0, normed=False, **kwargs):
        """
        m5Col = column with the five-sigma limiting depth of each visit
        filterCol = column with the filter of each visit
        fiducialDepth = None (use design values) or dict of fiducial depths per filter
        teffBase = baseline exposure time (seconds) corresponding to the fiducial depth
        normed = if True, return t_eff normalized by the fiducial total on-sky time
        """
        self.m5Col = m5Col
        self.filterCol = filterCol
        if fiducialDepth is None:
            self.depth = {'u': 23.9, 'g': 25.0, 'r': 24.7, 'i': 24.0,
                          'z': 23.3, 'y': 22.1}  # design value
        elif isinstance(fiducialDepth, dict):
            self.depth = fiducialDepth
        else:
            raise ValueError('fiducialDepth should be None or dictionary')
        self.teffBase = teffBase
        self.normed = normed
        if self.normed:
            units = ''
        else:
            units = 'seconds'
        super(TeffMetric, self).__init__(col=[m5Col, filterCol], metricName=metricName,
                                         units=units, **kwargs)
        if self.normed:
            self.comment = 'Normalized effective time'
        else:
            # Fixed typo: was 'Effect time'.
            self.comment = 'Effective time'
        self.comment += ' of a series of observations, evaluating the equivalent amount of time'
        self.comment += ' each observation would require if taken at a fiducial limiting magnitude.'
        self.comment += ' Fiducial depths are : %s' % self.depth
        if self.normed:
            self.comment += ' Normalized by the total amount of time actual on-sky.'

    def run(self, dataSlice, slicePoint=None):
        filters = np.unique(dataSlice[self.filterCol])
        teff = 0.0
        for f in filters:
            match = np.where(dataSlice[self.filterCol] == f)[0]
            # 10^(0.8*(m5 - fiducial)) scales each visit's time by delivered vs fiducial depth.
            teff += (10.0**(0.8*(dataSlice[self.m5Col][match] - self.depth[f]))).sum()
        teff *= self.teffBase
        if self.normed:
            # Normalize by the t_eff if each observation was at the fiducial depth.
            teff = teff / (self.teffBase*dataSlice[self.m5Col].size)
        return teff

 

 

class OpenShutterFractionMetric(BaseMetric):
    """
    Compute the fraction of time the shutter is open compared to the total time spent observing.
    """
    def __init__(self, metricName='OpenShutterFraction',
                 slewTimeCol='slewTime', expTimeCol='visitExposureTime', visitTimeCol='visitTime',
                 **kwargs):
        self.slewTimeCol = slewTimeCol
        self.expTimeCol = expTimeCol
        self.visitTimeCol = visitTimeCol
        super(OpenShutterFractionMetric, self).__init__(
            col=[self.expTimeCol, self.visitTimeCol, self.slewTimeCol],
            metricName=metricName, units='OpenShutter/TotalTime', **kwargs)
        self.comment = ('Open shutter time (%s total) divided by total visit time '
                        '(%s) + slewtime (%s).' % (self.expTimeCol, self.visitTimeCol,
                                                   self.slewTimeCol))

    def run(self, dataSlice, slicePoint=None):
        # Shutter-open (exposure) time over total elapsed time (visit time + slew time).
        openTime = np.sum(dataSlice[self.expTimeCol])
        totalTime = np.sum(dataSlice[self.slewTimeCol] + dataSlice[self.visitTimeCol])
        return openTime / totalTime

 

 

class CompletenessMetric(BaseMetric):
    """Compute the completeness and joint completeness """
    def __init__(self, filterColName='filter', metricName='Completeness',
                 u=0, g=0, r=0, i=0, z=0, y=0, **kwargs):
        """
        Compute the completeness for the each of the given filters and the
        joint completeness across all filters.

        Completeness calculated in any filter with a requested 'nvisits' value greater than 0, range is 0-1.
        """
        self.filterCol = filterColName
        super(CompletenessMetric, self).__init__(col=self.filterCol, metricName=metricName, **kwargs)
        self.nvisitsRequested = np.array([u, g, r, i, z, y])
        self.filters = np.array(['u', 'g', 'r', 'i', 'z', 'y'])
        # Remove filters from consideration where number of visits requested is zero.
        good = np.where(self.nvisitsRequested > 0)
        self.nvisitsRequested = self.nvisitsRequested[good]
        self.filters = self.filters[good]
        # Raise exception if number of visits wasn't changed from the default, for at least one filter.
        if len(self.filters) == 0:
            raise ValueError('Please set the requested number of visits for at least one filter.')
        # Set reduce order, for display purposes.
        # ('idx' rather than 'i' to avoid shadowing the i-band parameter above.)
        for idx, f in enumerate(['u', 'g', 'r', 'i', 'z', 'y', 'Joint']):
            self.reduceOrder[f] = idx
        self.comment = 'Completeness fraction for each filter (and joint across all filters), calculated'
        self.comment += ' as the number of visits compared to a benchmark value of :'
        for idx, f in enumerate(self.filters):
            self.comment += ' %s: %d' % (f, self.nvisitsRequested[idx])
        self.comment += '.'

    def run(self, dataSlice, slicePoint=None):
        """
        Compute the completeness for each filter, and then the minimum (joint) completeness for each slice.
        """
        allCompleteness = []
        for f, nVis in zip(self.filters, self.nvisitsRequested):
            filterVisits = np.size(np.where(dataSlice[self.filterCol] == f)[0])
            # Use builtin float: np.float was deprecated in NumPy 1.20 and removed in 1.24.
            allCompleteness.append(filterVisits / float(nVis))
        # Joint completeness (minimum across filters) is appended last.
        allCompleteness.append(np.min(np.array(allCompleteness)))
        return np.array(allCompleteness)

    def reduceu(self, completeness):
        """Completeness in u band (1 if u visits were not requested)."""
        if 'u' in self.filters:
            return completeness[np.where(self.filters == 'u')[0]]
        else:
            return 1

    def reduceg(self, completeness):
        """Completeness in g band (1 if g visits were not requested)."""
        if 'g' in self.filters:
            return completeness[np.where(self.filters == 'g')[0]]
        else:
            return 1

    def reducer(self, completeness):
        """Completeness in r band (1 if r visits were not requested)."""
        if 'r' in self.filters:
            return completeness[np.where(self.filters == 'r')[0]]
        else:
            return 1

    def reducei(self, completeness):
        """Completeness in i band (1 if i visits were not requested)."""
        if 'i' in self.filters:
            return completeness[np.where(self.filters == 'i')[0]]
        else:
            return 1

    def reducez(self, completeness):
        """Completeness in z band (1 if z visits were not requested)."""
        if 'z' in self.filters:
            return completeness[np.where(self.filters == 'z')[0]]
        else:
            return 1

    def reducey(self, completeness):
        """Completeness in y band (1 if y visits were not requested)."""
        if 'y' in self.filters:
            return completeness[np.where(self.filters == 'y')[0]]
        else:
            return 1

    def reduceJoint(self, completeness):
        """
        The joint completeness is just the minimum completeness for a point/field.
        """
        return completeness[-1]

 

 

class FilterColorsMetric(BaseMetric):
    """
    Calculate an RGBA value that accounts for the filters used up to time t0.
    """
    def __init__(self, rRGB='rRGB', gRGB='gRGB', bRGB='bRGB',
                 timeCol='observationStartMJD', t0=None, tStep=40./60./60./24.,
                 metricName='FilterColors', **kwargs):
        """
        t0 = the current time
        """
        self.rRGB = rRGB
        self.gRGB = gRGB
        self.bRGB = bRGB
        self.timeCol = timeCol
        # Default epoch if no current time was supplied.
        self.t0 = 59580 if t0 is None else t0
        self.tStep = tStep
        super(FilterColorsMetric, self).__init__(col=[rRGB, gRGB, bRGB, timeCol],
                                                 metricName=metricName, **kwargs)
        self.metricDtype = 'object'
        self.comment = 'Metric specifically to generate colors for the opsim movie'

    def _scaleColor(self, colorR, colorG, colorB):
        # Sum each channel, then rescale so the brightest channel equals 1.
        red = colorR.sum()
        green = colorG.sum()
        blue = colorB.sum()
        scale = 1. / np.max([red, green, blue])
        return red * scale, green * scale, blue * scale

    def run(self, dataSlice, slicePoint=None):
        deltaT = np.abs(dataSlice[self.timeCol] - self.t0)
        current = np.where(deltaT <= self.tStep)[0]
        if len(current) > 0:
            # Exact matches to this timestep: use their colors directly and set alpha to >1.
            subset = dataSlice[current]
            red, green, blue = self._scaleColor(subset[self.rRGB],
                                                subset[self.gRGB],
                                                subset[self.bRGB])
            return (red, green, blue, 10.)
        # This part of the sky has only older exposures.
        deltaTmin = deltaT.min()
        # Combined color, weighted towards the most recent observation.
        weight = deltaTmin / deltaT
        red, green, blue = self._scaleColor(dataSlice[self.rRGB] * weight,
                                            dataSlice[self.gRGB] * weight,
                                            dataSlice[self.bRGB] * weight)
        # Alpha lies between alphamax/alphamid for visits within the previous 12 hours,
        # otherwise between alphamid/alphamin with a value depending on the number of obs.
        alphamax, alphamid, alphamin = 0.8, 0.5, 0.2
        if deltaTmin < 0.5:
            alpha = np.exp(-deltaTmin * 10.) * (alphamax - alphamid) + alphamid
        else:
            alpha = len(dataSlice[self.timeCol]) / 800. * alphamid
            alpha = np.max([alpha, alphamin])
        alpha = np.min([alphamax, alpha])
        return (red, green, blue, alpha)

 

 

class BruteOSFMetric(BaseMetric):
    """Assume I can't trust the slewtime or visittime columns.

    This computes the fraction of time the shutter is open, with no penalty for the first exposure
    after a long gap (e.g., 1st exposure of the night). Presumably, the telescope will need to focus,
    so there's not much a scheduler could do to optimize keeping the shutter open after a closure.
    """
    def __init__(self, metricName='BruteOSFMetric',
                 expTimeCol='visitExposureTime', mjdCol='observationStartMJD', maxgap=10.,
                 fudge=0., **kwargs):
        """
        Parameters
        ----------
        maxgap : float (10.)
            The maximum gap between observations, in minutes. Assume anything longer the dome has closed.
        fudge : float (0.)
            Fudge factor if a constant has to be added to the exposure time values (like in OpSim 3.61).
        expTimeCol : str ('visitExposureTime')
            The name of the exposure time column. Assumed to be in seconds.
        mjdCol : str ('observationStartMJD')
            The name of the start of the exposures. Assumed to be in units of days.
        """
        self.expTimeCol = expTimeCol
        self.maxgap = maxgap/60./24.  # convert from min to days
        self.mjdCol = mjdCol
        self.fudge = fudge
        super(BruteOSFMetric, self).__init__(col=[self.expTimeCol, mjdCol],
                                             metricName=metricName, units='OpenShutter/TotalTime',
                                             **kwargs)

    def run(self, dataSlice, slicePoint=None):
        times = np.sort(dataSlice[self.mjdCol])
        diff = np.diff(times)
        # Drop gaps longer than maxgap -- assume the dome was closed during those.
        good = np.where(diff < self.maxgap)
        openTime = np.sum(diff[good])*24.*3600.  # days -> seconds
        result = np.sum(dataSlice[self.expTimeCol]+self.fudge) / float(openTime)
        return result