Coverage for python/lsst/sims/maf/metrics/cadenceMetrics.py : 19%

import numpy as np
from .baseMetric import BaseMetric

__all__ = ['TemplateExistsMetric', 'UniformityMetric',
           'RapidRevisitUniformityMetric', 'RapidRevisitMetric', 'NRevisitsMetric', 'IntraNightGapsMetric',
           'InterNightGapsMetric', 'VisitGapMetric', 'SeasonLengthMetric']


class fSMetric(BaseMetric):
    """Calculate the fS value (Nvisit-weighted delta(M5-M5srd)).
    """
    def __init__(self, filterCol='filter', metricName='fS', **kwargs):
        self.filterCol = filterCol
        cols = [self.filterCol]
        super().__init__(col=cols, metricName=metricName, units='fS', **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the fS (reserve above/below the m5 values from the LSST throughputs).

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The fS value.
        """
        # We could import this from the m5_flat_sed values, but it makes sense to calculate the m5
        # directly from the throughputs. This is easy enough to do and will allow variation of
        # the throughput curves and readnoise and visit length, etc.
        # Not yet implemented; run() currently returns None.
        pass


class TemplateExistsMetric(BaseMetric):
    """Calculate the fraction of images with a previous template image of desired quality.
    """
    def __init__(self, seeingCol='seeingFwhmGeom', observationStartMJDCol='observationStartMJD',
                 metricName='TemplateExistsMetric', **kwargs):
        cols = [seeingCol, observationStartMJDCol]
        super(TemplateExistsMetric, self).__init__(col=cols, metricName=metricName,
                                                   units='fraction', **kwargs)
        self.seeingCol = seeingCol
        self.observationStartMJDCol = observationStartMJDCol

    def run(self, dataSlice, slicePoint=None):
        """Calculate the fraction of images with a previous template image of desired quality.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The fraction of images with a 'good' previous template image.
        """
        # Check that data is sorted in observationStartMJD order
        dataSlice.sort(order=self.observationStartMJDCol)
        # Find the minimum seeing up to a given time
        seeing_mins = np.minimum.accumulate(dataSlice[self.seeingCol])
        # Find the difference between the seeing and the minimum seeing at the previous visit
        seeing_diff = dataSlice[self.seeingCol] - np.roll(seeing_mins, 1)
        # First image never has a template; check how many others do
        good = np.where(seeing_diff[1:] >= 0.)[0]
        frac = good.size / float(dataSlice[self.seeingCol].size)
        return frac
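

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): a small
# worked example of the TemplateExistsMetric calculation above, using plain numpy on
# hypothetical seeing values already sorted by observationStartMJD. It assumes only
# numpy imported as np, as at the top of this module.
def _exampleTemplateExists():
    seeing = np.array([0.9, 0.7, 0.8, 0.6, 0.65])
    # Best (minimum) seeing achieved up to and including each visit.
    seeing_mins = np.minimum.accumulate(seeing)        # [0.9, 0.7, 0.7, 0.6, 0.6]
    # Compare each visit's seeing to the best seeing available before it.
    seeing_diff = seeing - np.roll(seeing_mins, 1)
    # The first visit never has a template; visits 3 and 5 do (0.8 >= 0.7, 0.65 >= 0.6).
    good = np.where(seeing_diff[1:] >= 0.)[0]
    return good.size / float(seeing.size)              # 2 / 5 = 0.4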


class UniformityMetric(BaseMetric):
    """Calculate how uniformly the observations are spaced in time.
    Returns a value between 0 and 1.
    A value of zero means the observations are perfectly uniform.

    Parameters
    ----------
    surveyLength : float, optional
        The overall duration of the survey, in years. Default 10.
    """
    def __init__(self, mjdCol='observationStartMJD', units='',
                 surveyLength=10., **kwargs):
        """surveyLength = time span of survey (years) """
        self.mjdCol = mjdCol
        super(UniformityMetric, self).__init__(col=self.mjdCol, units=units, **kwargs)
        self.surveyLength = surveyLength

    def run(self, dataSlice, slicePoint=None):
        """Calculate the survey uniformity.

        This is based on how a KS-test works: look at the cumulative distribution of observation dates,
        and compare to a perfectly uniform cumulative distribution.
        Perfectly uniform observations = 0, perfectly non-uniform = 1.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            Uniformity of 'observationStartMJD'.
        """
        # If only one observation, there is no uniformity
        if dataSlice[self.mjdCol].size == 1:
            return 1
        # Scale dates to lie between 0 and 1, where 0 is the first observation date and 1 is surveyLength
        dates = (dataSlice[self.mjdCol] - dataSlice[self.mjdCol].min()) / \
            (self.surveyLength * 365.25)
        dates.sort()  # Just to be sure
        n_cum = np.arange(1, dates.size + 1) / float(dates.size)
        D_max = np.max(np.abs(n_cum - dates - dates[1]))
        return D_max
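

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): the same
# uniformity statistic computed on hypothetical visit times for a 10-year survey in
# which all visits fall within the first year. The column handling is stripped out;
# only numpy (imported as np at the top of this module) is assumed.
def _exampleUniformity(surveyLength=10.):
    mjd = np.sort(np.random.uniform(59850., 59850. + 365.25, 100))
    dates = (mjd - mjd.min()) / (surveyLength * 365.25)
    n_cum = np.arange(1, dates.size + 1) / float(dates.size)
    # The empirical CDF climbs to 1 while the scaled dates stay below ~0.1,
    # so the statistic is ~0.9 (strongly non-uniform).
    return np.max(np.abs(n_cum - dates - dates[1]))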


class RapidRevisitUniformityMetric(BaseMetric):
    """Calculate uniformity of time between consecutive visits on short timescales (for RAV1).

    Parameters
    ----------
    mjdCol : str, optional
        The column containing the 'time' value. Default observationStartMJD.
    minNvisits : int, optional
        The minimum number of visits required within the time interval (dTmin to dTmax).
        Default 100.
    dTmin : float, optional
        The minimum dTime to consider (in days). Default 40 seconds.
    dTmax : float, optional
        The maximum dTime to consider (in days). Default 30 minutes.
    """
    def __init__(self, mjdCol='observationStartMJD', minNvisits=100,
                 dTmin=40.0 / 60.0 / 60.0 / 24.0, dTmax=30.0 / 60.0 / 24.0,
                 metricName='RapidRevisitUniformity', **kwargs):
        self.mjdCol = mjdCol
        self.minNvisits = minNvisits
        self.dTmin = dTmin
        self.dTmax = dTmax
        super().__init__(col=self.mjdCol, metricName=metricName, **kwargs)
        # Update minNvisits, as 0 visits will crash algorithm and 1 is nonuniform by definition.
        if self.minNvisits <= 1:
            self.minNvisits = 2

    def run(self, dataSlice, slicePoint=None):
        """Calculate the uniformity of visits within dTmin to dTmax.

        Uses the same 'uniformity' calculation as the UniformityMetric, based on the KS-test.
        A value of 0 is perfectly uniform; a value of 1 is purely non-uniform.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The uniformity measurement of the visits within time interval dTmin to dTmax.
        """
        # Calculate consecutive visit time intervals
        dtimes = np.diff(np.sort(dataSlice[self.mjdCol]))
        # Identify dtimes within interval from dTmin/dTmax.
        good = np.where((dtimes >= self.dTmin) & (dtimes <= self.dTmax))[0]
        # If there are not enough visits in this time range, return bad value.
        if good.size < self.minNvisits:
            return self.badval
        # Throw out dtimes outside desired range, and sort, then scale to 0-1.
        dtimes = np.sort(dtimes[good])
        dtimes = (dtimes - dtimes.min()) / float(self.dTmax - self.dTmin)
        # Set up a uniform distribution between 0-1 (to match dtimes).
        uniform_dtimes = np.arange(1, dtimes.size + 1, 1) / float(dtimes.size)
        # Look at the differences between our times and the uniform times.
        dmax = np.max(np.abs(uniform_dtimes - dtimes - dtimes[1]))
        return dmax
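

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): the
# RapidRevisitUniformity statistic on hypothetical visit gaps that all cluster just
# above dTmin. After scaling by (dTmax - dTmin) the gaps pile up near 0 while the
# uniform reference climbs to 1, so the statistic comes out close to 1.
def _exampleRapidRevisitUniformity(dTmin=40.0 / 60.0 / 60.0 / 24.0,
                                   dTmax=30.0 / 60.0 / 24.0):
    dtimes = np.sort(np.random.uniform(dTmin, 2.0 * dTmin, 200))
    dtimes = (dtimes - dtimes.min()) / float(dTmax - dTmin)
    uniform_dtimes = np.arange(1, dtimes.size + 1, 1) / float(dtimes.size)
    return np.max(np.abs(uniform_dtimes - dtimes - dtimes[1]))   # close to 1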


class RapidRevisitMetric(BaseMetric):
    """Check whether a point receives enough rapid revisits.

    Returns 1 if the number of consecutive-visit time gaps between dTmin and dTpairs
    is at least minN1 and the number of gaps between dTmin and dTmax is at least minN2;
    otherwise returns 0.
    """
    def __init__(self, mjdCol='observationStartMJD', metricName='RapidRevisit',
                 dTmin=40.0 / 60.0 / 60.0 / 24.0, dTpairs=20.0 / 60.0 / 24.0,
                 dTmax=30.0 / 60.0 / 24.0, minN1=28, minN2=82, **kwargs):
        self.mjdCol = mjdCol
        self.dTmin = dTmin
        self.dTpairs = dTpairs
        self.dTmax = dTmax
        self.minN1 = minN1
        self.minN2 = minN2
        super().__init__(col=self.mjdCol, metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        dtimes = np.diff(np.sort(dataSlice[self.mjdCol]))
        # Count gaps in the 'pair' window (dTmin to dTpairs) and in the wider window (dTmin to dTmax).
        N1 = len(np.where((dtimes >= self.dTmin) & (dtimes <= self.dTpairs))[0])
        N2 = len(np.where((dtimes >= self.dTmin) & (dtimes <= self.dTmax))[0])
        # Pass/fail: both thresholds must be met.
        if (N1 >= self.minN1) and (N2 >= self.minN2):
            val = 1
        else:
            val = 0
        return val
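

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): the
# RapidRevisit pair counting on hypothetical visits spaced 15 minutes apart, so every
# gap lands between dTmin (40 s) and dTpairs (20 min); both thresholds are met.
def _exampleRapidRevisit(dTmin=40.0 / 60.0 / 60.0 / 24.0, dTpairs=20.0 / 60.0 / 24.0,
                         dTmax=30.0 / 60.0 / 24.0, minN1=28, minN2=82):
    mjd = 59850. + np.arange(100) * 15. / 60. / 24.        # 100 visits, 15 minutes apart
    dtimes = np.diff(np.sort(mjd))
    N1 = np.sum((dtimes >= dTmin) & (dtimes <= dTpairs))   # 99
    N2 = np.sum((dtimes >= dTmin) & (dtimes <= dTmax))     # 99
    return 1 if (N1 >= minN1) and (N2 >= minN2) else 0     # 1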


class NRevisitsMetric(BaseMetric):
    """Calculate the number of consecutive visits with time differences less than dT.

    Parameters
    ----------
    dT : float, optional
        The time interval to consider (in minutes). Default 30.
    normed : bool, optional
        Flag to indicate whether to return the total number of consecutive visits with time
        differences less than dT (False), or the fraction of overall visits (True).
        Note that we would expect (if all visits occur in pairs within dT) this fraction would be 0.5!
    """
    def __init__(self, mjdCol='observationStartMJD', dT=30.0, normed=False, metricName=None, **kwargs):
        units = ''
        if metricName is None:
            if normed:
                metricName = 'Fraction of revisits faster than %.1f minutes' % (dT)
            else:
                metricName = 'Number of revisits faster than %.1f minutes' % (dT)
                units = '#'
        self.mjdCol = mjdCol
        self.dT = dT / 60. / 24.  # convert to days
        self.normed = normed
        super(NRevisitsMetric, self).__init__(col=self.mjdCol, units=units, metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Count the number of consecutive visits occurring within time intervals dT.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            Either the total number of consecutive visits within dT or the fraction compared to overall visits.
        """
        dtimes = np.diff(np.sort(dataSlice[self.mjdCol]))
        nFastRevisits = np.size(np.where(dtimes <= self.dT)[0])
        if self.normed:
            nFastRevisits = nFastRevisits / float(np.size(dataSlice[self.mjdCol]))
        return nFastRevisits
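

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): the
# NRevisits count for hypothetical visits that come in back-to-back pairs. With
# normed=True the fraction comes out at 0.5, as noted in the class docstring.
def _exampleNRevisits(dT=30.0 / 60. / 24.):
    # Ten pairs of visits: the two visits in a pair are 10 minutes apart, and
    # successive pairs are separated by one day.
    mjd = np.ravel([[59850. + n, 59850. + n + 10. / 60. / 24.] for n in range(10)])
    dtimes = np.diff(np.sort(mjd))
    nFastRevisits = np.size(np.where(dtimes <= dT)[0])   # 10 fast revisits
    return nFastRevisits / float(np.size(mjd))           # 10 / 20 = 0.5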


class IntraNightGapsMetric(BaseMetric):
    """
    Calculate the gap between consecutive observations within a night, in hours.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        Default np.median.
    """
    def __init__(self, mjdCol='observationStartMJD', nightCol='night', reduceFunc=np.median,
                 metricName='Median Intra-Night Gap', **kwargs):
        units = 'hours'
        self.mjdCol = mjdCol
        self.nightCol = nightCol
        self.reduceFunc = reduceFunc
        super(IntraNightGapsMetric, self).__init__(col=[self.mjdCol, self.nightCol],
                                                   units=units, metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the (reduceFunc) of the gap between consecutive observations within a night.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) value of the gap, in hours.
        """
        dataSlice.sort(order=self.mjdCol)
        dt = np.diff(dataSlice[self.mjdCol])
        dn = np.diff(dataSlice[self.nightCol])

        good = np.where(dn == 0)
        if np.size(good[0]) == 0:
            result = self.badval
        else:
            result = self.reduceFunc(dt[good]) * 24
        return result
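

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): the
# intra-night gap calculation on hypothetical data with two visits per night,
# 20 minutes apart; only the within-night gaps (dn == 0) survive the cut.
def _exampleIntraNightGap(reduceFunc=np.median):
    night = np.repeat(np.arange(3), 2)                         # [0, 0, 1, 1, 2, 2]
    mjd = 59850. + night + np.tile([0., 20. / 60. / 24.], 3)   # 20-minute pairs
    dt = np.diff(mjd)
    dn = np.diff(night)
    good = np.where(dn == 0)
    return reduceFunc(dt[good]) * 24                           # 1/3 hour = 20 minutes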


class InterNightGapsMetric(BaseMetric):
    """
    Calculate the gap between consecutive observations in different nights, in days.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        Default np.median.
    """
    def __init__(self, mjdCol='observationStartMJD', nightCol='night', reduceFunc=np.median,
                 metricName='Median Inter-Night Gap', **kwargs):
        units = 'days'
        self.mjdCol = mjdCol
        self.nightCol = nightCol
        self.reduceFunc = reduceFunc
        super(InterNightGapsMetric, self).__init__(col=[self.mjdCol, self.nightCol],
                                                   units=units, metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the (reduceFunc) of the gap between consecutive nights of observations.

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) of the gap between consecutive nights of observations, in days.
        """
        dataSlice.sort(order=self.mjdCol)
        unights = np.unique(dataSlice[self.nightCol])
        if np.size(unights) < 2:
            result = self.badval
        else:
            # Find the first and last observation of each night
            firstOfNight = np.searchsorted(dataSlice[self.nightCol], unights)
            lastOfNight = np.searchsorted(dataSlice[self.nightCol], unights, side='right') - 1
            diff = dataSlice[self.mjdCol][firstOfNight[1:]] - dataSlice[self.mjdCol][lastOfNight[:-1]]
            result = self.reduceFunc(diff)
        return result
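

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): how the
# searchsorted calls above pick out the first and last visit of each night, using a
# hypothetical, MJD-sorted set of visits on nights 0, 1 and 3.
def _exampleInterNightGap(reduceFunc=np.median):
    night = np.array([0, 0, 1, 1, 1, 3])
    mjd = 59850. + night + np.array([0.1, 0.2, 0.1, 0.15, 0.2, 0.1])
    unights = np.unique(night)                                        # [0, 1, 3]
    firstOfNight = np.searchsorted(night, unights)                    # [0, 2, 5]
    lastOfNight = np.searchsorted(night, unights, side='right') - 1   # [1, 4, 5]
    # Gap = first visit of each night minus last visit of the previous night.
    diff = mjd[firstOfNight[1:]] - mjd[lastOfNight[:-1]]              # [0.9, 1.9] days
    return reduceFunc(diff)                                           # 1.4 days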


class VisitGapMetric(BaseMetric):
    """
    Calculate the gap between any consecutive observations, in hours, regardless of night boundaries.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        Default np.median.
    """
    def __init__(self, mjdCol='observationStartMJD', nightCol='night', reduceFunc=np.median,
                 metricName='VisitGap', **kwargs):
        units = 'hours'
        self.mjdCol = mjdCol
        self.nightCol = nightCol
        self.reduceFunc = reduceFunc
        super().__init__(col=[self.mjdCol, self.nightCol],
                         units=units, metricName=metricName, **kwargs)

    def run(self, dataSlice, slicePoint=None):
        """Calculate the (reduceFunc) of the gap between consecutive observations.

        Different from the inter-night and intra-night gaps, because this is really just counting
        all of the times between consecutive observations (not time between nights or time within a night).

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict, optional
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) of the time between consecutive observations, in hours.
        """
        dataSlice.sort(order=self.mjdCol)
        diff = np.diff(dataSlice[self.mjdCol])
        result = self.reduceFunc(diff) * 24.
        return result


class SeasonLengthMetric(BaseMetric):
    """
    Calculate the length of LSST seasons, in days.

    Parameters
    ----------
    reduceFunc : function, optional
        Function that can operate on array-like structures. Typically numpy function.
        This reduces the per-season lengths (one value per season, roughly 10 over the survey)
        to a single value. Default np.median.
    """
    def __init__(self, mjdCol='observationStartMJD', reduceFunc=np.median,
                 metricName='SeasonLength', **kwargs):
        units = 'days'
        self.mjdCol = mjdCol
        self.reduceFunc = reduceFunc
        super().__init__(col=[self.mjdCol],
                         units=units, metricName=metricName, **kwargs)
        # objRA=0 on autumnal equinox.
        # autumnal equinox 2014 happened on Sept 23 --> Equinox MJD
        self.Equinox = 2456923.5 - 2400000.5

    def run(self, dataSlice, slicePoint):
        """Calculate the (reduceFunc) of the length of each season.

        Uses the slicePoint RA/Dec to calculate the position in question, then uses the times of the visits
        to assign them into seasons (based on where the sun is relative to the slicePoint RA/Dec).

        Parameters
        ----------
        dataSlice : numpy.array
            Numpy structured array containing the data related to the visits provided by the slicer.
        slicePoint : dict
            Dictionary containing information about the slicepoint currently active in the slicer.

        Returns
        -------
        float
            The (reduceFunc) of the length of each season, in days.
        """
        dataSlice.sort(order=self.mjdCol)
        # SlicePoint ra/dec are always in radians - convert to HOURS for this (b/c time)
        objRA = np.degrees(slicePoint['ra']) / 15.0
        # objRA=0 on autumnal equinox.
        # autumnal equinox 2014 happened on Sept 23 --> Equinox MJD
        # Use 365.25 for the length of a year here, because we're dealing with real seasons.
        daysSinceEquinox = 0.5 * objRA * (365.25 / 12.0)  # 0.5 to go from RA to month; 365.25/12.0 months to days
        firstSeasonBegan = self.Equinox + daysSinceEquinox - 0.5 * 365.25  # in MJD
        # Now we can compute the number of years since the first season
        # began, and so assign a global integer season number:
        globalSeason = np.floor((dataSlice[self.mjdCol] - firstSeasonBegan) / 365.25)
        # Subtract off season number of first observation:
        seasons = globalSeason - np.min(globalSeason)
        # Get the unique seasons, so that we can separate each one
        season_list = np.unique(seasons)
        # Find the first and last observation of each season.
        firstOfSeason = np.searchsorted(seasons, season_list)
        lastOfSeason = np.searchsorted(seasons, season_list, side='right') - 1

        seasonlength = dataSlice[self.mjdCol][lastOfSeason] - dataSlice[self.mjdCol][firstOfSeason]
        result = self.reduceFunc(seasonlength)
        return result
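

# Illustrative sketch (editorial addition, not part of cadenceMetrics.py): how the
# season bookkeeping above assigns hypothetical visits to seasons for a field at
# RA = 6 hours, and then measures each season's length. All dates are made up.
def _exampleSeasonLength(reduceFunc=np.median):
    equinox = 2456923.5 - 2400000.5                      # autumnal equinox 2014, in MJD
    objRA = 6.0                                          # RA in hours
    daysSinceEquinox = 0.5 * objRA * (365.25 / 12.0)     # ~91 days
    firstSeasonBegan = equinox + daysSinceEquinox - 0.5 * 365.25
    # Hypothetical visits: two blocks of visits every 10 days, roughly one year apart.
    mjd = np.sort(np.concatenate([59850. + np.arange(0, 120, 10.),
                                  60215. + np.arange(0, 120, 10.)]))
    globalSeason = np.floor((mjd - firstSeasonBegan) / 365.25)
    seasons = globalSeason - np.min(globalSeason)        # [0, 0, ..., 1, 1, ...]
    season_list = np.unique(seasons)
    firstOfSeason = np.searchsorted(seasons, season_list)
    lastOfSeason = np.searchsorted(seasons, season_list, side='right') - 1
    seasonlength = mjd[lastOfSeason] - mjd[firstOfSeason]
    return reduceFunc(seasonlength)                      # 110 days per season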