
import os
import sqlite3 as db
import datetime
import socket
import numpy as np
import healpy as hp
import pandas as pd
import matplotlib.path as mplPath
from lsst.sims.utils import _hpid2RaDec, xyz_angular_radius, _buildTree, _xyz_from_ra_dec
from lsst.sims.featureScheduler import version
from lsst.sims.survey.fields import FieldsDatabase

class int_rounded(object):
    """
    Class to help force comparisons to be made on scaled-up integers,
    preventing machine precision issues across platforms.

    Parameters
    ----------
    inval : number-like thing
        Some number that we want to compare
    scale : float (1e5)
        How much to scale inval before rounding and converting to an int.
    """
    def __init__(self, inval, scale=1e5):
        self.initial = inval
        self.value = np.round(inval * scale).astype(int)
        self.scale = scale

    def __eq__(self, other):
        return self.value == other.value

    def __ne__(self, other):
        return self.value != other.value

    def __lt__(self, other):
        return self.value < other.value

    def __le__(self, other):
        return self.value <= other.value

    def __gt__(self, other):
        return self.value > other.value

    def __ge__(self, other):
        return self.value >= other.value

    def __repr__(self):
        return str(self.initial)

    def __add__(self, other):
        out_scale = np.min([self.scale, other.scale])
        result = int_rounded(self.initial + other.initial, scale=out_scale)
        return result

    def __sub__(self, other):
        out_scale = np.min([self.scale, other.scale])
        result = int_rounded(self.initial - other.initial, scale=out_scale)
        return result

    def __mul__(self, other):
        out_scale = np.min([self.scale, other.scale])
        result = int_rounded(self.initial * other.initial, scale=out_scale)
        return result

    def __truediv__(self, other):
        out_scale = np.min([self.scale, other.scale])
        result = int_rounded(self.initial / other.initial, scale=out_scale)
        return result
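
# Illustrative usage (sketch, not part of the original module): int_rounded makes
# float comparisons deterministic by comparing scaled integers, so differences
# smaller than 1/scale are rounded away.
# >>> int_rounded(0.1 + 0.2) == int_rounded(0.3)
# True
# >>> int_rounded(1.000001) > int_rounded(1.0)
# False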

def set_default_nside(nside=None):
    """
    Utility function to set a default nside value across the scheduler.

    XXX-there might be a better way to do this.

    Parameters
    ----------
    nside : int (None)
        A valid healpixel nside.
    """
    if not hasattr(set_default_nside, 'nside'):
        if nside is None:
            nside = 32
        set_default_nside.nside = nside
    if nside is not None:
        set_default_nside.nside = nside
    return set_default_nside.nside
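
# Illustrative usage (sketch): the default nside is cached on the function object
# the first time it is called; later calls with an explicit nside update the cache.
# >>> set_default_nside()    # first call falls back to nside=32
# 32
# >>> set_default_nside(64)  # explicitly override the cached default
# 64
# >>> set_default_nside()    # subsequent calls return the updated value
# 64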

def restore_scheduler(observationId, scheduler, observatory, filename, filter_sched=None):
    """Put the scheduler and observatory in the state they were in. Handy for checking the reward function.

    Parameters
    ----------
    observationId : int
        The ID of the last observation that should be completed
    scheduler : lsst.sims.featureScheduler.scheduler object
        Scheduler object.
    observatory : lsst.sims.featureScheduler.observatory.Model_observatory
        The observatory object
    filename : str
        The output sqlite database to use
    filter_sched : lsst.sims.featureScheduler.scheduler object
        The filter scheduler. Note that we don't look up the official end of the previous night,
        so there is potential for the loaded filters to not match.
    """
    sc = schema_converter()
    # load up the observations
    observations = sc.opsim2obs(filename)
    good_obs = np.where(observations['ID'] <= observationId)[0]
    observations = observations[good_obs]

    # replay the observations back into the scheduler
    for obs in observations:
        scheduler.add_observation(obs)
        if filter_sched is not None:
            filter_sched.add_observation(obs)

    if filter_sched is not None:
        # Make sure we have mounted the right filters for the night
        # XXX--note, this might not be exact, but should work most of the time.
        mjd_start_night = np.min(observations['mjd'][np.where(observations['night'] == obs['night'])])
        observatory.mjd = mjd_start_night
        conditions = observatory.return_conditions()
        filters_needed = filter_sched(conditions)
    else:
        filters_needed = ['u', 'g', 'r', 'i', 'y']

    # update the observatory
    observatory.mjd = obs['mjd'] + observatory.observatory.visit_time(obs)/3600./24.
    observatory.observatory.parked = False
    observatory.observatory.current_RA_rad = obs['RA']
    observatory.observatory.current_dec_rad = obs['dec']
    observatory.observatory.current_rotSkyPos_rad = obs['rotSkyPos']
    observatory.observatory.cumulative_azimuth_rad = obs['cummTelAz']
    observatory.observatory.mounted_filters = filters_needed
    # Note that we haven't updated last_az_rad, etc., but those values should be ignored.

    return scheduler, observatory
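
# Illustrative usage (sketch; the scheduler, observatory, filter_sched objects and
# the 'my_run.db' filename are hypothetical): rebuild the state that produced
# observation 1000 from a previously written opsim sqlite file, then keep simulating.
# >>> scheduler, observatory = restore_scheduler(1000, scheduler, observatory,
# ...                                            'my_run.db', filter_sched=filter_sched)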

def int_binned_stat(ids, values, statistic=np.mean):
    """
    Like scipy.stats.binned_statistic, but for unique integer ids.
    """

    uids = np.unique(ids)
    order = np.argsort(ids)

    ordered_ids = ids[order]
    ordered_values = values[order]

    left = np.searchsorted(ordered_ids, uids, side='left')
    right = np.searchsorted(ordered_ids, uids, side='right')

    stat_results = []
    for le, ri in zip(left, right):
        stat_results.append(statistic(ordered_values[le:ri]))

    return uids, np.array(stat_results)
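
# Illustrative usage (sketch): compute the mean value per integer id.
# >>> ids = np.array([3, 1, 3, 1, 2])
# >>> values = np.array([10., 2., 20., 4., 7.])
# >>> int_binned_stat(ids, values, statistic=np.mean)
# (array([1, 2, 3]), array([ 3.,  7., 15.]))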

def gnomonic_project_toxy(RA1, Dec1, RAcen, Deccen):
    """Calculate x/y projection of RA1/Dec1 in system with center at RAcen, Deccen.
    Input radians. Grabbed from sims_selfcal"""
    # also used in Global Telescope Network website
    cosc = np.sin(Deccen) * np.sin(Dec1) + np.cos(Deccen) * np.cos(Dec1) * np.cos(RA1-RAcen)
    x = np.cos(Dec1) * np.sin(RA1-RAcen) / cosc
    y = (np.cos(Deccen)*np.sin(Dec1) - np.sin(Deccen)*np.cos(Dec1)*np.cos(RA1-RAcen)) / cosc
    return x, y

def gnomonic_project_tosky(x, y, RAcen, Deccen):
    """Calculate RA/Dec on sky of object with x/y and RAcen/Deccen of the field of view.
    Returns RA/Dec in radians."""
    denom = np.cos(Deccen) - y * np.sin(Deccen)
    RA = RAcen + np.arctan2(x, denom)
    Dec = np.arctan2(np.sin(Deccen) + y * np.cos(Deccen), np.sqrt(x*x + denom*denom))
    return RA, Dec
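
# Illustrative usage (sketch): project a point near a field center onto the tangent
# plane and back; the round trip recovers the original coordinates.
# >>> ra, dec = np.radians(10.5), np.radians(-20.2)
# >>> x, y = gnomonic_project_toxy(ra, dec, np.radians(10.), np.radians(-20.))
# >>> gnomonic_project_tosky(x, y, np.radians(10.), np.radians(-20.))  # ~ (ra, dec)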

def match_hp_resolution(in_map, nside_out, UNSEEN2nan=True):
    """Utility to convert healpix map resolution if needed and change hp.UNSEEN values to
    np.nan.

    Parameters
    ----------
    in_map : np.array
        A valid healpix map
    nside_out : int
        The desired resolution to convert in_map to
    UNSEEN2nan : bool (True)
        If True, convert any hp.UNSEEN values to np.nan
    """
    current_nside = hp.npix2nside(np.size(in_map))
    if current_nside != nside_out:
        out_map = hp.ud_grade(in_map, nside_out=nside_out)
    else:
        out_map = in_map
    if UNSEEN2nan:
        out_map[np.where(out_map == hp.UNSEEN)] = np.nan
    return out_map
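
# Illustrative usage (sketch): degrade an nside=64 map to nside=32, turning any
# hp.UNSEEN pixels into np.nan along the way.
# >>> in_map = np.random.rand(hp.nside2npix(64))
# >>> out_map = match_hp_resolution(in_map, nside_out=32)
# >>> hp.npix2nside(out_map.size)
# 32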

def raster_sort(x0, order=['x', 'y'], xbin=1.):
    """XXXX--deprecated, use tsp instead.

    Do a sort to scan a grid up and down. Simple starting guess to traveling salesman.

    Parameters
    ----------
    x0 : array
    order : list
        Keys for the order x0 should be sorted in.
    xbin : float (1.)
        The binsize to round off the first coordinate into

    Returns
    -------
    array sorted so that it rasters up and down.
    """
    coords = x0.copy()
    bins = np.arange(coords[order[0]].min()-xbin/2., coords[order[0]].max()+3.*xbin/2., xbin)
    # digitize my bins
    coords[order[0]] = np.digitize(coords[order[0]], bins)
    order1 = np.argsort(coords, order=order)
    coords = coords[order1]
    places_to_invert = np.where(np.diff(coords[order[-1]]) < 0)[0]
    if np.size(places_to_invert) > 0:
        places_to_invert += 1
        indx = np.arange(coords.size)
        index_sorted = np.zeros(indx.size, dtype=int)
        index_sorted[0:places_to_invert[0]] = indx[0:places_to_invert[0]]

        for i, inv_pt in enumerate(places_to_invert[:-1]):
            if i % 2 == 0:
                index_sorted[inv_pt:places_to_invert[i+1]] = indx[inv_pt:places_to_invert[i+1]][::-1]
            else:
                index_sorted[inv_pt:places_to_invert[i+1]] = indx[inv_pt:places_to_invert[i+1]]

        if np.size(places_to_invert) % 2 != 0:
            index_sorted[places_to_invert[-1]:] = indx[places_to_invert[-1]:][::-1]
        else:
            index_sorted[places_to_invert[-1]:] = indx[places_to_invert[-1]:]
        return order1[index_sorted]
    else:
        return order1

class schema_converter(object):
    """
    Record how to convert an observation array to the standard opsim schema
    """
    def __init__(self):
        # Conversion dictionary, keys are opsim schema, values are observation dtype names
        self.convert_dict = {'observationId': 'ID', 'night': 'night',
                             'observationStartMJD': 'mjd',
                             'observationStartLST': 'lmst', 'numExposures': 'nexp',
                             'visitTime': 'visittime', 'visitExposureTime': 'exptime',
                             'proposalId': 'survey_id', 'fieldId': 'field_id',
                             'fieldRA': 'RA', 'fieldDec': 'dec', 'altitude': 'alt', 'azimuth': 'az',
                             'filter': 'filter', 'airmass': 'airmass', 'skyBrightness': 'skybrightness',
                             'cloud': 'clouds', 'seeingFwhm500': 'FWHM_500',
                             'seeingFwhmGeom': 'FWHM_geometric', 'seeingFwhmEff': 'FWHMeff',
                             'fiveSigmaDepth': 'fivesigmadepth', 'slewTime': 'slewtime',
                             'slewDistance': 'slewdist', 'paraAngle': 'pa', 'rotTelPos': 'rotTelPos',
                             'rotSkyPos': 'rotSkyPos', 'moonRA': 'moonRA',
                             'moonDec': 'moonDec', 'moonAlt': 'moonAlt', 'moonAz': 'moonAz',
                             'moonDistance': 'moonDist', 'moonPhase': 'moonPhase',
                             'sunAlt': 'sunAlt', 'sunAz': 'sunAz', 'solarElong': 'solarElong',
                             'note': 'note'}
        # Column(s) not bothering to remap: 'observationStartTime': None,
        self.inv_map = {v: k for k, v in self.convert_dict.items()}
        # Angles to convert
        self.angles_rad2deg = ['fieldRA', 'fieldDec', 'altitude', 'azimuth', 'slewDistance',
                               'paraAngle', 'rotTelPos', 'rotSkyPos', 'moonRA', 'moonDec',
                               'moonAlt', 'moonAz', 'moonDistance', 'sunAlt', 'sunAz', 'solarElong',
                               'cummTelAz']
        # Put LMST into degrees too
        self.angles_hours2deg = ['observationStartLST']

    def obs2opsim(self, obs_array, filename=None, info=None, delete_past=False):
        """Convert an array of observations into a pandas dataframe with the opsim schema.
        """
        if delete_past:
            try:
                os.remove(filename)
            except OSError:
                pass

        df = pd.DataFrame(obs_array)
        df = df.rename(index=str, columns=self.inv_map)
        for colname in self.angles_rad2deg:
            df[colname] = np.degrees(df[colname])
        for colname in self.angles_hours2deg:
            df[colname] = df[colname] * 360./24.

        if filename is not None:
            con = db.connect(filename)
            df.to_sql('SummaryAllProps', con, index=False)
            if info is not None:
                df = pd.DataFrame(info)
                df.to_sql('info', con)

    def opsim2obs(self, filename):
        """Convert an opsim schema sqlite database into an observation array.
        """

        con = db.connect(filename)
        df = pd.read_sql('select * from SummaryAllProps;', con)
        for key in self.angles_rad2deg:
            df[key] = np.radians(df[key])
        for key in self.angles_hours2deg:
            df[key] = df[key] * 24./360.

        df = df.rename(index=str, columns=self.convert_dict)

        blank = empty_observation()
        final_result = np.empty(df.shape[0], dtype=blank.dtype)
        # XXX-ugh, there has to be a better way.
        for i, key in enumerate(df.columns):
            if key in self.inv_map.keys():
                final_result[key] = df[key].values

        return final_result
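
# Illustrative round trip (sketch; 'observations.db' is a hypothetical filename):
# write an observation array to an opsim-style sqlite file and read it back into
# the scheduler's native numpy format.
# >>> sc = schema_converter()
# >>> obs = empty_observation()
# >>> obs['RA'], obs['dec'], obs['filter'] = np.radians(30.), np.radians(-10.), 'r'
# >>> sc.obs2opsim(obs, filename='observations.db', delete_past=True)
# >>> restored = sc.opsim2obs('observations.db')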

def empty_observation():
    """
    Return a numpy array that could be a handy observation record

    XXX: Should this really be "empty visit"? Should we have "visits" made
    up of multiple "observations" to support multi-exposure time visits?

    XXX-Could add a bool flag for "observed". Then easy to track all proposed
    observations. Could also add an mjd_min, mjd_max for when an observation should be observed.
    That way we could drop things into the queue for DD fields.

    XXX--might be nice to add a generic "sched_note" str field, to record any metadata that
    would be useful to the scheduler once it's observed, and/or an observationID.

    Returns
    -------
    numpy array

    Notes
    -----
    The numpy fields have the following structure
    RA : float
        The Right Ascension of the observation (center of the field) (Radians)
    dec : float
        Declination of the observation (Radians)
    mjd : float
        Modified Julian Date at the start of the observation (time shutter opens)
    exptime : float
        Total exposure time of the visit (seconds)
    filter : str
        The filter used. Should be one of u, g, r, i, z, y.
    rotSkyPos : float
        The rotation angle of the camera relative to the sky E of N (Radians)
    nexp : int
        Number of exposures in the visit.
    airmass : float
        Airmass at the center of the field
    FWHMeff : float
        The effective seeing FWHM at the center of the field. (arcsec)
    skybrightness : float
        The surface brightness of the sky background at the center of the
        field. (mag/sq arcsec)
    night : int
        The night number of the observation (days)
    flush_by_mjd : float
        If we hit this MJD, we should flush the queue and refill it.
    cummTelAz : float
        The cumulative telescope rotation in azimuth
    """

    names = ['ID', 'RA', 'dec', 'mjd', 'flush_by_mjd', 'exptime', 'filter', 'rotSkyPos', 'nexp',
             'airmass', 'FWHM_500', 'FWHMeff', 'FWHM_geometric', 'skybrightness', 'night',
             'slewtime', 'visittime', 'slewdist', 'fivesigmadepth',
             'alt', 'az', 'pa', 'clouds', 'moonAlt', 'sunAlt', 'note',
             'field_id', 'survey_id', 'block_id',
             'lmst', 'rotTelPos', 'moonAz', 'sunAz', 'sunRA', 'sunDec', 'moonRA', 'moonDec',
             'moonDist', 'solarElong', 'moonPhase', 'cummTelAz']

    types = [int, float, float, float, float, float, 'U1', float, int,
             float, float, float, float, float, int,
             float, float, float, float,
             float, float, float, float, float, float, 'U40',
             int, int, int,
             float, float, float, float, float, float, float, float,
             float, float, float, float]
    result = np.zeros(1, dtype=list(zip(names, types)))
    return result
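
# Illustrative usage (sketch): create a blank observation record and fill in the
# fields a survey would set before handing it to the scheduler.
# >>> obs = empty_observation()
# >>> obs['RA'] = np.radians(90.)
# >>> obs['dec'] = np.radians(-30.)
# >>> obs['filter'] = 'g'
# >>> obs['exptime'] = 30.
# >>> obs['nexp'] = 2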

def scheduled_observation():
    """Make an array for pre-scheduling observations

    mjd_tol : float
        The tolerance on how early an observation can execute (days).
    """

    # Standard things from the usual observations
    names = ['ID', 'RA', 'dec', 'mjd', 'flush_by_mjd', 'exptime', 'filter', 'rotSkyPos', 'nexp',
             'note']
    types = [int, float, float, float, float, float, 'U1', float, float, 'U40']
    names += ['mjd_tol', 'dist_tol', 'alt_min', 'alt_max', 'HA_max', 'HA_min', 'observed']
    types += [float, float, float, float, float, float, bool]
    result = np.zeros(1, dtype=list(zip(names, types)))
    return result

def read_fields():
    """
    Read in the Field coordinates

    Returns
    -------
    numpy.array
        With RA and dec in radians.
    """
    query = 'select fieldId, fieldRA, fieldDEC from Field;'
    fd = FieldsDatabase()
    fields = np.array(list(fd.get_field_set(query)))
    # order by field ID
    fields = fields[fields[:, 0].argsort()]

    names = ['RA', 'dec']
    types = [float, float]
    result = np.zeros(np.size(fields[:, 1]), dtype=list(zip(names, types)))
    result['RA'] = np.radians(fields[:, 1])
    result['dec'] = np.radians(fields[:, 2])

    return result

def hp_kd_tree(nside=None, leafsize=100, scale=1e5):
    """
    Generate a KD-tree of healpixel locations

    Parameters
    ----------
    nside : int
        A valid healpix nside
    leafsize : int (100)
        Leafsize of the kdtree
    scale : float (1e5)
        Scaling factor passed through to _buildTree when constructing the tree.

    Returns
    -------
    tree : scipy kdtree
    """
    if nside is None:
        nside = set_default_nside()

    hpid = np.arange(hp.nside2npix(nside))
    ra, dec = _hpid2RaDec(nside, hpid)
    return _buildTree(ra, dec, leafsize, scale=scale)

class hp_in_lsst_fov(object):
    """
    Return the healpixels within a pointing. A very simple LSST camera model with
    no chip/raft gaps.
    """
    def __init__(self, nside=None, fov_radius=1.75, scale=1e5):
        """
        Parameters
        ----------
        fov_radius : float (1.75)
            Radius of the field of view in degrees
        """
        if nside is None:
            nside = set_default_nside()

        self.tree = hp_kd_tree(nside=nside, scale=scale)
        self.radius = np.round(xyz_angular_radius(fov_radius)*scale).astype(int)
        self.scale = scale

    def __call__(self, ra, dec, **kwargs):
        """
        Parameters
        ----------
        ra : float
            RA in radians
        dec : float
            Dec in radians

        Returns
        -------
        indx : numpy array
            The healpixels that are within the FoV
        """

        x, y, z = _xyz_from_ra_dec(np.max(ra), np.max(dec))
        x = np.round(x * self.scale).astype(int)
        y = np.round(y * self.scale).astype(int)
        z = np.round(z * self.scale).astype(int)

        indices = self.tree.query_ball_point((x, y, z), self.radius)
        return np.array(indices)
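
# Illustrative usage (sketch): find the healpixels covered by a single LSST pointing
# at the default nside.
# >>> pointing2indx = hp_in_lsst_fov(nside=32)
# >>> indx = pointing2indx(np.radians(0.), np.radians(-30.))
# >>> indx.size  # number of nside=32 healpixels inside the 1.75 degree FoV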

class hp_in_comcam_fov(object):
    """
    Return the healpixels within a ComCam pointing. Simple camera model
    with no chip gaps.
    """
    def __init__(self, nside=None, side_length=0.7):
        """
        Parameters
        ----------
        side_length : float (0.7)
            The length of one side of the square field of view (degrees).
        """
        if nside is None:
            nside = set_default_nside()
        self.nside = nside
        self.tree = hp_kd_tree(nside=nside)
        self.side_length = np.radians(side_length)
        self.inner_radius = xyz_angular_radius(side_length/2.)
        self.outter_radius = xyz_angular_radius(side_length/2.*np.sqrt(2.))
        # The positions of the raft corners, unrotated
        self.corners_x = np.array([-self.side_length/2., -self.side_length/2., self.side_length/2.,
                                   self.side_length/2.])
        self.corners_y = np.array([self.side_length/2., -self.side_length/2., -self.side_length/2.,
                                   self.side_length/2.])

    def __call__(self, ra, dec, rotSkyPos=0.):
        """
        Parameters
        ----------
        ra : float
            RA in radians
        dec : float
            Dec in radians
        rotSkyPos : float
            The rotation angle of the camera in radians

        Returns
        -------
        indx : numpy array
            The healpixels that are within the FoV
        """
        x, y, z = _xyz_from_ra_dec(np.max(ra), np.max(dec))
        # Healpixels within the inner circle
        indices = self.tree.query_ball_point((x, y, z), self.inner_radius)
        # Healpixels within the outer circle
        indices_all = np.array(self.tree.query_ball_point((x, y, z), self.outter_radius))
        indices_to_check = indices_all[np.in1d(indices_all, indices, invert=True)]

        cos_rot = np.cos(rotSkyPos)
        sin_rot = np.sin(rotSkyPos)
        x_rotated = self.corners_x*cos_rot - self.corners_y*sin_rot
        y_rotated = self.corners_x*sin_rot + self.corners_y*cos_rot

        # Draw the square that we want to check if points are in.
        bbPath = mplPath.Path(np.array([[x_rotated[0], y_rotated[0]],
                                        [x_rotated[1], y_rotated[1]],
                                        [x_rotated[2], y_rotated[2]],
                                        [x_rotated[3], y_rotated[3]],
                                        [x_rotated[0], y_rotated[0]]]))

        ra_to_check, dec_to_check = _hpid2RaDec(self.nside, indices_to_check)

        # Project the indices to check to the tangent plane, see if they fall inside the polygon
        x, y = gnomonic_project_toxy(ra_to_check, dec_to_check, ra, dec)
        for i, xcheck in enumerate(x):
            # I wonder if I can do this all at once rather than a loop?
            if bbPath.contains_point((x[i], y[i])):
                indices.append(indices_to_check[i])

        return np.array(indices)
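
# Illustrative usage (sketch): the ComCam footprint is square, so the camera rotation
# angle changes which healpixels fall inside the pointing.
# >>> comcam = hp_in_comcam_fov(nside=64)
# >>> indx_unrotated = comcam(np.radians(120.), np.radians(-45.))
# >>> indx_rotated = comcam(np.radians(120.), np.radians(-45.), rotSkyPos=np.radians(45.))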

def run_info_table(observatory, extra_info=None):
    """
    Make a little table for recording the information about a run
    """

    observatory_info = observatory.get_info()
    if extra_info is not None:
        for key in extra_info:
            observatory_info.append([key, extra_info[key]])
    observatory_info = np.array(observatory_info)

    n_feature_entries = 4

    names = ['Parameter', 'Value']
    dtypes = ['|U200', '|U200']
    result = np.zeros(observatory_info[:, 0].size + n_feature_entries,
                      dtype=list(zip(names, dtypes)))

    # Fill in info about the run
    result[0]['Parameter'] = 'Date, ymd'
    now = datetime.datetime.now()
    result[0]['Value'] = '%i, %i, %i' % (now.year, now.month, now.day)

    result[1]['Parameter'] = 'hostname'
    result[1]['Value'] = socket.gethostname()

    result[2]['Parameter'] = 'featureScheduler version'
    result[2]['Value'] = version.__version__

    result[3]['Parameter'] = 'featureScheduler fingerprint'
    result[3]['Value'] = version.__fingerprint__

    result[4:]['Parameter'] = observatory_info[:, 0]
    result[4:]['Value'] = observatory_info[:, 1]

    return result

def inrange(inval, minimum=-1., maximum=1.):
    """
    Make sure values are within min/max
    """
    inval = np.array(inval)
    below = np.where(inval < minimum)
    inval[below] = minimum
    above = np.where(inval > maximum)
    inval[above] = maximum
    return inval

def warm_start(scheduler, observations, mjd_key='mjd'):
    """Replay a list of observations into the scheduler

    Parameters
    ----------
    scheduler : scheduler object

    observations : np.array
        An array of observations (e.g., from sqlite2observations)
    """

    # Check that observations are in order
    observations.sort(order=mjd_key)
    for observation in observations:
        scheduler.add_observation(observation)

    return scheduler

def season_calc(night, offset=0, modulo=None, max_season=None, season_length=365.25, floor=True):
    """
    Compute what season a night is in with possible offset and modulo,

    using the convention that night -365 to 0 is season -1.

    Parameters
    ----------
    night : int or array
        The night we want to convert to a season
    offset : float or array (0)
        Offset to be applied to night (days)
    modulo : int (None)
        If the season should be modulated (i.e., so we can get all even years)
        (seasons, years w/default season_length)
    max_season : int (None)
        For any season above this value (before modulo), set to -1
    season_length : float (365.25)
        How long to consider one season (nights)
    floor : bool (True)
        If True, take the floor of the season. Otherwise, returns season as a float
    """
    if np.size(night) == 1:
        night = np.ravel(np.array([night]))
    result = night + offset
    result = result/season_length
    if floor:
        result = np.floor(result)
    if max_season is not None:
        over_indx = np.where(int_rounded(result) >= int_rounded(max_season))

    if modulo is not None:
        neg = np.where(int_rounded(result) < int_rounded(0))
        result = result % modulo
        result[neg] = -1
    if max_season is not None:
        result[over_indx] = -1
    if floor:
        result = result.astype(int)
    return result
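
# Illustrative usage (sketch): night 0 is the start of season 0, and the offset can
# shift where the season boundary falls (e.g., per-healpixel offsets from
# create_season_offset below).
# >>> season_calc(400)
# array([1])
# >>> season_calc(400, offset=-365.25)
# array([0])
# >>> season_calc(np.array([100, 500, 900]), modulo=2)
# array([0, 1, 0])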

def create_season_offset(nside, sun_RA_rad):
    """
    Make an offset map so seasons roll properly
    """
    hpindx = np.arange(hp.nside2npix(nside))
    ra, dec = _hpid2RaDec(nside, hpindx)
    offset = ra - sun_RA_rad + 2.*np.pi
    offset = offset % (np.pi*2)
    offset = offset * 365.25/(np.pi*2)
    offset = -offset - 365.25
    return offset

class TargetoO(object):
    """Class to hold information about a target of opportunity object

    Parameters
    ----------
    tooid : int
        Unique ID for the ToO.
    footprint : np.array
        A healpix map. 1 for areas to observe, 0 for no observe.
    mjd_start : float
        The MJD the ToO starts
    duration : float
        Duration of the ToO (days).
    """
    def __init__(self, tooid, footprint, mjd_start, duration):
        self.footprint = footprint
        self.duration = duration
        self.id = tooid
        self.mjd_start = mjd_start

class Sim_targetoO_server(object):
    """Wrapper to deliver a targetoO object at the right time
    """

    def __init__(self, targetoO_list):
        self.targetoO_list = targetoO_list
        self.mjd_starts = np.array([too.mjd_start for too in self.targetoO_list])
        durations = np.array([too.duration for too in self.targetoO_list])
        self.mjd_ends = self.mjd_starts + durations

    def __call__(self, mjd):
        in_range = np.where((mjd > self.mjd_starts) & (mjd < self.mjd_ends))[0]
        result = None
        if in_range.size > 0:
            result = [self.targetoO_list[i] for i in in_range]
        return result
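
# Illustrative usage (sketch; the MJD values are made up): build two ToO events and
# query the server for whichever are active at a given MJD.
# >>> too1 = TargetoO(1, np.zeros(hp.nside2npix(32)), mjd_start=59853., duration=2.)
# >>> too2 = TargetoO(2, np.zeros(hp.nside2npix(32)), mjd_start=59860., duration=1.)
# >>> server = Sim_targetoO_server([too1, too2])
# >>> server(59854.)  # only the first ToO is active, returns [too1]
# >>> server(59870.)  # nothing active, returns None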