
# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

__all__ = ("Instrument", "makeExposureRecordFromObsInfo", "makeVisitRecordFromObsInfo",
           "addUnboundedCalibrationLabel")

import os.path
from abc import ABCMeta, abstractmethod
import astropy.time

from lsst.daf.butler import TIMESPAN_MIN, TIMESPAN_MAX, DatasetType, DataCoordinate
from lsst.utils import getPackageDir, doImport

# A dataset type is a "standard text curated calibration" when it uses the
# standard DatasetType definition given in this mapping.
StandardCuratedCalibrationDatasetTypes = {
    "defects": {"dimensions": ("instrument", "detector", "calibration_label"),
                "storageClass": "Defects"},
    "qe_curve": {"dimensions": ("instrument", "detector", "calibration_label"),
                 "storageClass": "QECurve"},
}


class Instrument(metaclass=ABCMeta):
    """Base class for instrument-specific logic for the Gen3 Butler.

    Concrete instrument subclasses should be directly constructable with no
    arguments.
    """

    configPaths = ()
    """Paths to config files to read for specific Tasks.

    The paths in this list should contain files of the form `task.py`, for
    each of the Tasks that requires special configuration.
    """

    policyName = None
    """Instrument-specific name to use when locating a policy or configuration
    file in the file system."""

    obsDataPackage = None
    """Name of the package containing the text curated calibration files.
    Usually an obs _data package.  If `None`, no curated calibration files
    will be read. (`str`)"""

    standardCuratedDatasetTypes = tuple(StandardCuratedCalibrationDatasetTypes)
    """The dataset types expected to be obtained from the obsDataPackage.
    These dataset types are all required to have standard definitions and
    must be known to the base class.  Clearing this list will prevent
    any of these calibrations from being stored.  If a dataset type is not
    known to a specific instrument it can still be included in this list,
    since the data package is the source of truth.
    """


    @property
    @abstractmethod
    def filterDefinitions(self):
        """`~lsst.obs.base.FilterDefinitionCollection`, defining the filters
        for this instrument.
        """
        return None

    def __init__(self, *args, **kwargs):
        self.filterDefinitions.reset()
        self.filterDefinitions.defineFilters()
        self._obsDataPackageDir = None


    @classmethod
    @abstractmethod
    def getName(cls):
        """Return the short (dimension) name for this instrument.

        This is the name used as the ``instrument`` value in data IDs and
        registry records (see `register` and `fromName`).
        """
        raise NotImplementedError()


    @abstractmethod
    def getCamera(self):
        """Retrieve the cameraGeom representation of this instrument.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        raise NotImplementedError()

    @abstractmethod
    def register(self, registry):
        """Insert instrument, physical_filter, and detector entries into a
        `Registry`.
        """
        raise NotImplementedError()

    @property
    def obsDataPackageDir(self):
        """The root directory of the obs data package holding this
        instrument's curated calibration files (`str`), or `None` if
        ``obsDataPackage`` is not set.
        """
        if self.obsDataPackage is None:
            return None
        if self._obsDataPackageDir is None:
            # Defer any problems with locating the package until
            # we need to find it.
            self._obsDataPackageDir = getPackageDir(self.obsDataPackage)
        return self._obsDataPackageDir


    @classmethod
    def fromName(cls, name, registry):
        """Given an instrument name and a butler registry, retrieve a
        corresponding instantiated instrument object.

        Parameters
        ----------
        name : `str`
            Name of the instrument (must match the name property of
            an instrument class).
        registry : `lsst.daf.butler.Registry`
            Butler registry to query to find the information.

        Returns
        -------
        instrument : `Instrument`
            An instance of the relevant `Instrument`.

        Notes
        -----
        The instrument must be registered in the corresponding butler.

        Raises
        ------
        LookupError
            Raised if the instrument is not known to the supplied registry.
        ModuleNotFoundError
            Raised if the class could not be imported.  This could mean
            that the relevant obs package has not been setup.
        TypeError
            Raised if the class name retrieved is not a string.
        """
        dimensions = list(registry.queryDimensions("instrument", dataId={"instrument": name}))
        className = dimensions[0].records["instrument"].class_name
        if not isinstance(className, str):
            raise TypeError(f"Unexpected class name retrieved from {name} instrument dimension "
                            f"(got {className})")
        instrument = doImport(className)
        return instrument()

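    # Example (illustrative sketch, not part of this module): assuming an
    # instrument named "HSC" has already been registered in the repository,
    # the matching class can be instantiated from the registry alone:
    #
    #     instrument = Instrument.fromName("HSC", butler.registry)
    #     camera = instrument.getCamera()
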

    def _registerFilters(self, registry):
        """Register the physical and abstract filter Dimension relationships.
        This should be called in the ``register`` implementation.

        Parameters
        ----------
        registry : `lsst.daf.butler.core.Registry`
            The registry to add dimensions to.
        """
        for filter in self.filterDefinitions:
            # Fix for undefined abstract filters causing trouble in the
            # registry: fall back to the physical filter name.
            if filter.abstract_filter is None:
                abstract_filter = filter.physical_filter
            else:
                abstract_filter = filter.abstract_filter

            registry.insertDimensionData("physical_filter",
                                         {"instrument": self.getName(),
                                          "name": filter.physical_filter,
                                          "abstract_filter": abstract_filter
                                          })


    @abstractmethod
    def getRawFormatter(self, dataId):
        """Return the Formatter class that should be used to read a particular
        raw file.

        Parameters
        ----------
        dataId : `DataCoordinate`
            Dimension-based ID for the raw file or files being ingested.

        Returns
        -------
        formatter : `Formatter` class
            Class to be used that reads the file into an
            `lsst.afw.image.Exposure` instance.
        """
        raise NotImplementedError()


    def writeCuratedCalibrations(self, butler):
        """Write human-curated calibration Datasets to the given Butler with
        the appropriate validity ranges.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.

        Notes
        -----
        Expected to be called from subclasses.  The base method calls
        ``writeCameraGeom`` and ``writeStandardTextCuratedCalibrations``.
        """
        self.writeCameraGeom(butler)
        self.writeStandardTextCuratedCalibrations(butler)


    def applyConfigOverrides(self, name, config):
        """Apply instrument-specific overrides for a task config.

        Parameters
        ----------
        name : `str`
            Name of the object being configured; typically the ``_DefaultName``
            of a Task.
        config : `lsst.pex.config.Config`
            Config instance to which overrides should be applied.
        """
        for root in self.configPaths:
            path = os.path.join(root, f"{name}.py")
            if os.path.exists(path):
                config.load(path)

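    # Example (illustrative sketch): a subclass that ships task config
    # overrides points ``configPaths`` at directories containing files named
    # ``<task>.py``; the package and task names below are hypothetical.
    #
    #     configPaths = (os.path.join(getPackageDir("obs_example"), "config"),)
    #     ...
    #     instrument.applyConfigOverrides("isr", isrConfig)
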

    def writeCameraGeom(self, butler):
        """Write the default camera geometry to the butler repository
        with an infinite validity range.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        """
        datasetType = DatasetType("camera", ("instrument", "calibration_label"), "Camera",
                                  universe=butler.registry.dimensions)
        butler.registry.registerDatasetType(datasetType)
        unboundedDataId = addUnboundedCalibrationLabel(butler.registry, self.getName())
        camera = self.getCamera()
        butler.put(camera, datasetType, unboundedDataId)


    def writeStandardTextCuratedCalibrations(self, butler):
        """Write the set of standardized curated text calibrations to
        the repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        """
        for datasetTypeName in self.standardCuratedDatasetTypes:
            # We need to define the dataset types.
            if datasetTypeName not in StandardCuratedCalibrationDatasetTypes:
                raise ValueError(f"DatasetType {datasetTypeName} not in understood list"
                                 f" [{', '.join(StandardCuratedCalibrationDatasetTypes)}]")
            definition = StandardCuratedCalibrationDatasetTypes[datasetTypeName]
            datasetType = DatasetType(datasetTypeName,
                                      universe=butler.registry.dimensions,
                                      **definition)
            self._writeSpecificCuratedCalibrationDatasets(butler, datasetType)


    def _writeSpecificCuratedCalibrationDatasets(self, butler, datasetType):
        """Write standardized curated calibration datasets for this specific
        dataset type from an obs data package.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Gen3 butler in which to put the calibrations.
        datasetType : `lsst.daf.butler.DatasetType`
            Dataset type to be put.

        Notes
        -----
        This method scans the location defined in the ``obsDataPackageDir``
        class attribute for curated calibrations corresponding to the
        supplied dataset type.  The directory name in the data package must
        match the name of the dataset type.  The files are assumed to use the
        standard layout, to be readable by
        `~lsst.pipe.tasks.read_curated_calibs.read_all`, and to provide
        standard metadata.
        """
        if self.obsDataPackageDir is None:
            # If there is no data package then there can't be datasets.
            return

        calibPath = os.path.join(self.obsDataPackageDir, self.policyName,
                                 datasetType.name)

        if not os.path.exists(calibPath):
            return

        # Register the dataset type.
        butler.registry.registerDatasetType(datasetType)

        # obs_base can't depend on pipe_tasks but concrete obs packages
        # can -- we therefore have to defer the import.
        from lsst.pipe.tasks.read_curated_calibs import read_all

        camera = self.getCamera()
        calibsDict = read_all(calibPath, camera)[0]  # second return is calib type
        endOfTime = TIMESPAN_MAX
        dimensionRecords = []
        datasetRecords = []
        # First loop builds the dimension and dataset records, pairing each
        # calibration with a validity range that ends where the next begins.
        for det in calibsDict:
            times = sorted(calibsDict[det])
            calibs = [calibsDict[det][time] for time in times]
            times = [astropy.time.Time(t, format="datetime", scale="utc") for t in times]
            times += [endOfTime]
            for calib, beginTime, endTime in zip(calibs, times[:-1], times[1:]):
                md = calib.getMetadata()
                calibrationLabel = f"{datasetType.name}/{md['CALIBDATE']}/{md['DETECTOR']}"
                dataId = DataCoordinate.standardize(
                    universe=butler.registry.dimensions,
                    instrument=self.getName(),
                    calibration_label=calibrationLabel,
                    detector=md["DETECTOR"],
                )
                datasetRecords.append((calib, dataId))
                dimensionRecords.append({
                    "instrument": self.getName(),
                    "name": calibrationLabel,
                    "datetime_begin": beginTime,
                    "datetime_end": endTime,
                })

        # Second loop actually does the inserts and filesystem writes.
        with butler.transaction():
            butler.registry.insertDimensionData("calibration_label", *dimensionRecords)
            # TODO: vectorize these puts, once butler APIs for that become
            # available.
            for calib, dataId in datasetRecords:
                butler.put(calib, datasetType, dataId)

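# Example (illustrative sketch, not part of this module): the shape of a
# minimal concrete subclass.  All names below ("Example", "mypkg", the policy
# name) are hypothetical placeholders.
#
#     class ExampleInstrument(Instrument):
#         filterDefinitions = ...  # an lsst.obs.base.FilterDefinitionCollection
#         policyName = "example"
#         obsDataPackage = None  # no curated calibration data package
#
#         @classmethod
#         def getName(cls):
#             return "Example"
#
#         def getCamera(self):
#             ...  # return this instrument's lsst.afw.cameraGeom.Camera
#
#         def register(self, registry):
#             registry.insertDimensionData(
#                 "instrument", {"name": self.getName(),
#                                "class_name": "mypkg.ExampleInstrument"})
#             # ... insert detector records here as well ...
#             self._registerFilters(registry)
#
#         def getRawFormatter(self, dataId):
#             ...  # return the Formatter class for this instrument's raw files
#
# Once registered, ``ExampleInstrument().writeCuratedCalibrations(butler)``
# writes the camera geometry (and any curated calibrations) with appropriate
# validity ranges.
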

def makeExposureRecordFromObsInfo(obsInfo, universe):
    """Construct an exposure DimensionRecord from
    `astro_metadata_translator.ObservationInfo`.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        A `~astro_metadata_translator.ObservationInfo` object corresponding to
        the exposure.
    universe : `DimensionUniverse`
        Set of all known dimensions.

    Returns
    -------
    record : `DimensionRecord`
        A record containing exposure metadata, suitable for insertion into
        a `Registry`.
    """
    dimension = universe["exposure"]
    return dimension.RecordClass.fromDict({
        "instrument": obsInfo.instrument,
        "id": obsInfo.exposure_id,
        "name": obsInfo.observation_id,
        "group_name": obsInfo.exposure_group,
        "datetime_begin": obsInfo.datetime_begin,
        "datetime_end": obsInfo.datetime_end,
        "exposure_time": obsInfo.exposure_time.to_value("s"),
        "dark_time": obsInfo.dark_time.to_value("s"),
        "observation_type": obsInfo.observation_type,
        "physical_filter": obsInfo.physical_filter,
        "visit": obsInfo.visit_id,
    })

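# Example (illustrative sketch): building an exposure record from a raw
# header.  The file name is hypothetical, and acceptance of record objects by
# ``insertDimensionData`` is assumed here.
#
#     from astropy.io import fits
#     from astro_metadata_translator import ObservationInfo
#
#     obsInfo = ObservationInfo(fits.getheader("raw.fits"))
#     record = makeExposureRecordFromObsInfo(obsInfo, butler.registry.dimensions)
#     butler.registry.insertDimensionData("exposure", record)
#
# ``makeVisitRecordFromObsInfo`` works the same way for the "visit" dimension,
# with an optional spatial ``region``.
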

def makeVisitRecordFromObsInfo(obsInfo, universe, *, region=None):
    """Construct a visit `DimensionRecord` from
    `astro_metadata_translator.ObservationInfo`.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        A `~astro_metadata_translator.ObservationInfo` object corresponding to
        the exposure.
    universe : `DimensionUniverse`
        Set of all known dimensions.
    region : `lsst.sphgeom.Region`, optional
        Spatial region for the visit.

    Returns
    -------
    record : `DimensionRecord`
        A record containing visit metadata, suitable for insertion into a
        `Registry`.
    """
    dimension = universe["visit"]
    return dimension.RecordClass.fromDict({
        "instrument": obsInfo.instrument,
        "id": obsInfo.visit_id,
        "name": obsInfo.observation_id,
        "datetime_begin": obsInfo.datetime_begin,
        "datetime_end": obsInfo.datetime_end,
        "exposure_time": obsInfo.exposure_time.to_value("s"),
        "physical_filter": obsInfo.physical_filter,
        "region": region,
    })


def addUnboundedCalibrationLabel(registry, instrumentName):
    """Add a special 'unbounded' calibration_label dimension entry for the
    given camera that is valid for any exposure.

    If such an entry already exists, this function just returns a `DataId`
    for the existing entry.

    Parameters
    ----------
    registry : `Registry`
        Registry object in which to insert the dimension entry.
    instrumentName : `str`
        Name of the instrument this calibration label is associated with.

    Returns
    -------
    dataId : `DataId`
        New or existing data ID for the unbounded calibration.
    """
    d = dict(instrument=instrumentName, calibration_label="unbounded")
    try:
        return registry.expandDataId(d)
    except LookupError:
        pass
    entry = d.copy()
    entry["datetime_begin"] = TIMESPAN_MIN
    entry["datetime_end"] = TIMESPAN_MAX
    registry.insertDimensionData("calibration_label", entry)
    return registry.expandDataId(d)
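
# Example (illustrative sketch): give an always-valid dataset such as camera
# geometry an unbounded validity range ("Example" is a hypothetical instrument
# name, and the "camera" dataset type must already be registered).
#
#     dataId = addUnboundedCalibrationLabel(butler.registry, "Example")
#     butler.put(camera, "camera", dataId)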