# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

__all__ = ("Instrument", "makeExposureRecordFromObsInfo", "addUnboundedCalibrationLabel", "loadCamera")

import os.path
from abc import ABCMeta, abstractmethod
from typing import Any, Tuple
import astropy.time

from lsst.afw.cameraGeom import Camera
from lsst.daf.butler import Butler, DataId, TIMESPAN_MIN, TIMESPAN_MAX, DatasetType, DataCoordinate
from lsst.utils import getPackageDir, doImport

# To be a standard text curated calibration means that we use a
# standard definition for the corresponding DatasetType.
StandardCuratedCalibrationDatasetTypes = {
    "defects": {"dimensions": ("instrument", "detector", "calibration_label"),
                "storageClass": "Defects"},
    "qe_curve": {"dimensions": ("instrument", "detector", "calibration_label"),
                 "storageClass": "QECurve"},
}


class Instrument(metaclass=ABCMeta):
    """Base class for instrument-specific logic for the Gen3 Butler.

    Concrete instrument subclasses should be directly constructable with no
    arguments.
    """

    configPaths = ()
    """Paths to config files to read for specific Tasks.

    The paths in this list should contain files of the form `task.py`, for
    each of the Tasks that requires special configuration.
    """

    policyName = None
    """Instrument-specific name to use when locating a policy or configuration
    file in the file system."""

    obsDataPackage = None
    """Name of the package containing the text curated calibration files.
    Usually an obs `_data` package. If `None`, no curated calibration files
    will be read. (`str`)"""

    standardCuratedDatasetTypes = tuple(StandardCuratedCalibrationDatasetTypes)
    """The dataset types expected to be obtained from the obsDataPackage.

    These dataset types are all required to have standard definitions and
    must be known to the base class. Clearing this list will prevent
    any of these calibrations from being stored. If a dataset type is not
    known to a specific instrument, it can still be included in this list,
    since the data package is the source of truth.
    """
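
    # Note: a concrete subclass typically fills in the attributes above and
    # implements the abstract methods below. A minimal, hypothetical sketch
    # (names such as ExampleCam and obs_example are illustrative only):
    #
    #     class ExampleCam(Instrument):
    #         filterDefinitions = EXAMPLE_FILTER_DEFINITIONS
    #         policyName = "exampleCam"
    #         obsDataPackage = "obs_example_data"
    #         configPaths = [os.path.join(getPackageDir("obs_example"), "config")]
    #
    #         @classmethod
    #         def getName(cls):
    #             return "ExampleCam"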

    @property
    @abstractmethod
    def filterDefinitions(self):
        """`~lsst.obs.base.FilterDefinitionCollection`, defining the filters
        for this instrument.
        """
        return None

    def __init__(self, *args, **kwargs):
        self.filterDefinitions.reset()
        self.filterDefinitions.defineFilters()
        self._obsDataPackageDir = None

    @classmethod
    @abstractmethod
    def getName(cls):
        """Return the short (dimension) name for this instrument."""
        raise NotImplementedError()

    @abstractmethod
    def getCamera(self):
        """Retrieve the cameraGeom representation of this instrument.

        This is a temporary API that should go away once obs_ packages have
        a standardized approach to writing versioned cameras to a Gen3 repo.
        """
        raise NotImplementedError()

    @abstractmethod
    def register(self, registry):
        """Insert instrument, physical_filter, and detector entries into a
        `Registry`.
        """
        raise NotImplementedError()

    @property
    def obsDataPackageDir(self):
        """The root directory of the obs data package that provides the
        curated calibration files for this instrument (`str`).
        """
        if self.obsDataPackage is None:
            return None
        if self._obsDataPackageDir is None:
            # Defer any problems with locating the package until
            # we need to find it.
            self._obsDataPackageDir = getPackageDir(self.obsDataPackage)
        return self._obsDataPackageDir

    @classmethod
    def fromName(cls, name, registry):
        """Given an instrument name and a butler registry, retrieve a
        corresponding instantiated instrument object.

        Parameters
        ----------
        name : `str`
            Name of the instrument (must match the name property of
            an instrument class).
        registry : `lsst.daf.butler.Registry`
            Butler registry to query to find the information.

        Returns
        -------
        instrument : `Instrument`
            An instance of the relevant `Instrument`.

        Notes
        -----
        The instrument must be registered in the corresponding butler.

        Raises
        ------
        LookupError
            Raised if the instrument is not known to the supplied registry.
        ModuleNotFoundError
            Raised if the class could not be imported. This could mean
            that the relevant obs package has not been setup.
        TypeError
            Raised if the class name retrieved is not a string.
        """
        dimensions = list(registry.queryDimensions("instrument", dataId={"instrument": name}))
        cls = dimensions[0].records["instrument"].class_name
        if not isinstance(cls, str):
            raise TypeError(f"Unexpected class name retrieved from {name} instrument dimension (got {cls})")
        instrument = doImport(cls)
        return instrument()
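
    # Example use (a hedged sketch; the repository path and instrument name
    # below are hypothetical):
    #
    #     from lsst.daf.butler import Butler
    #     registry = Butler("/path/to/repo").registry
    #     instrument = Instrument.fromName("ExampleCam", registry)
    #     camera = instrument.getCamera()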

    def _registerFilters(self, registry):
        """Register the physical and abstract filter Dimension relationships.
        This should be called in the ``register`` implementation.

        Parameters
        ----------
        registry : `lsst.daf.butler.core.Registry`
            The registry to add dimensions to.
        """
        for filter in self.filterDefinitions:
            # fix for undefined abstract filters causing trouble in the
            # registry:
            if filter.abstract_filter is None:
                abstract_filter = filter.physical_filter
            else:
                abstract_filter = filter.abstract_filter

            registry.insertDimensionData("physical_filter",
                                         {"instrument": self.getName(),
                                          "name": filter.physical_filter,
                                          "abstract_filter": abstract_filter
                                          })

    @abstractmethod
    def getRawFormatter(self, dataId):
        """Return the Formatter class that should be used to read a particular
        raw file.

        Parameters
        ----------
        dataId : `DataCoordinate`
            Dimension-based ID for the raw file or files being ingested.

        Returns
        -------
        formatter : `Formatter` class
            Class to be used that reads the file into an
            `lsst.afw.image.Exposure` instance.
        """
        raise NotImplementedError()

    def writeCuratedCalibrations(self, butler):
        """Write human-curated calibration Datasets to the given Butler with
        the appropriate validity ranges.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to use to store these calibrations.

        Notes
        -----
        Expected to be called from subclasses. The base method calls
        ``writeCameraGeom`` and ``writeStandardTextCuratedCalibrations``.
        """
        self.writeCameraGeom(butler)
        self.writeStandardTextCuratedCalibrations(butler)
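
    # Example use from a concrete subclass (a hedged sketch; the repository
    # path and run name are hypothetical):
    #
    #     butler = Butler("/path/to/repo", run="calib/curated")
    #     instrument = ExampleCam()
    #     instrument.register(butler.registry)
    #     instrument.writeCuratedCalibrations(butler)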

    def applyConfigOverrides(self, name, config):
        """Apply instrument-specific overrides for a task config.

        Parameters
        ----------
        name : `str`
            Name of the object being configured; typically the
            ``_DefaultName`` of a Task.
        config : `lsst.pex.config.Config`
            Config instance to which overrides should be applied.
        """
        for root in self.configPaths:
            path = os.path.join(root, f"{name}.py")
            if os.path.exists(path):
                config.load(path)
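
    # Example (a hedged sketch; the task and config names are hypothetical):
    # for each root in ``configPaths``, a file named ``isr.py`` would be
    # loaded here if present.
    #
    #     config = IsrTask.ConfigClass()
    #     instrument.applyConfigOverrides(IsrTask._DefaultName, config)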

    def writeCameraGeom(self, butler):
        """Write the default camera geometry to the butler repository
        with an infinite validity range.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        """

        datasetType = DatasetType("camera", ("instrument", "calibration_label"), "Camera",
                                  universe=butler.registry.dimensions)
        butler.registry.registerDatasetType(datasetType)
        unboundedDataId = addUnboundedCalibrationLabel(butler.registry, self.getName())
        camera = self.getCamera()
        butler.put(camera, datasetType, unboundedDataId)

    def writeStandardTextCuratedCalibrations(self, butler):
        """Write the set of standardized curated text calibrations to
        the repository.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Butler to receive these calibration datasets.
        """

        for datasetTypeName in self.standardCuratedDatasetTypes:
            # We need to define the dataset types.
            if datasetTypeName not in StandardCuratedCalibrationDatasetTypes:
                raise ValueError(f"DatasetType {datasetTypeName} not in understood list"
                                 f" [{', '.join(StandardCuratedCalibrationDatasetTypes)}]")
            definition = StandardCuratedCalibrationDatasetTypes[datasetTypeName]
            datasetType = DatasetType(datasetTypeName,
                                      universe=butler.registry.dimensions,
                                      **definition)
            self._writeSpecificCuratedCalibrationDatasets(butler, datasetType)

    def _writeSpecificCuratedCalibrationDatasets(self, butler, datasetType):
        """Write standardized curated calibration datasets for this specific
        dataset type from an obs data package.

        Parameters
        ----------
        butler : `lsst.daf.butler.Butler`
            Gen3 butler in which to put the calibrations.
        datasetType : `lsst.daf.butler.DatasetType`
            Dataset type to be put.

        Notes
        -----
        This method scans the location defined in the ``obsDataPackageDir``
        class attribute for curated calibrations corresponding to the
        supplied dataset type. The directory name in the data package must
        match the name of the dataset type. They are assumed to use the
        standard layout and can be read by
        `~lsst.pipe.tasks.read_curated_calibs.read_all` and provide standard
        metadata.
        """
        if self.obsDataPackageDir is None:
            # if there is no data package then there can't be datasets
            return

        calibPath = os.path.join(self.obsDataPackageDir, self.policyName,
                                 datasetType.name)

        if not os.path.exists(calibPath):
            return

        # Register the dataset type
        butler.registry.registerDatasetType(datasetType)

        # obs_base can't depend on pipe_tasks but concrete obs packages
        # can -- we therefore have to defer import
        from lsst.pipe.tasks.read_curated_calibs import read_all

        camera = self.getCamera()
        calibsDict = read_all(calibPath, camera)[0]  # second return is calib type
        endOfTime = TIMESPAN_MAX
        dimensionRecords = []
        datasetRecords = []
        for det in calibsDict:
            times = sorted([k for k in calibsDict[det]])
            calibs = [calibsDict[det][time] for time in times]
            times = [astropy.time.Time(t, format="datetime", scale="utc") for t in times]
            times += [endOfTime]
            for calib, beginTime, endTime in zip(calibs, times[:-1], times[1:]):
                md = calib.getMetadata()
                calibrationLabel = f"{datasetType.name}/{md['CALIBDATE']}/{md['DETECTOR']}"
                dataId = DataCoordinate.standardize(
                    universe=butler.registry.dimensions,
                    instrument=self.getName(),
                    calibration_label=calibrationLabel,
                    detector=md["DETECTOR"],
                )
                datasetRecords.append((calib, dataId))
                dimensionRecords.append({
                    "instrument": self.getName(),
                    "name": calibrationLabel,
                    "datetime_begin": beginTime,
                    "datetime_end": endTime,
                })

        # Second loop actually does the inserts and filesystem writes.
        with butler.transaction():
            butler.registry.insertDimensionData("calibration_label", *dimensionRecords)
            # TODO: vectorize these puts, once butler APIs for that become
            # available.
            for calib, dataId in datasetRecords:
                butler.put(calib, datasetType, dataId)
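
    # The on-disk layout consumed by ``read_all`` above is roughly (a hedged
    # sketch; the detector and file names are illustrative only):
    #
    #     <obsDataPackageDir>/<policyName>/<datasetType.name>/
    #         <detector_name>/<valid_start_date>.ecsv
    #
    # with per-file metadata supplying at least CALIBDATE and DETECTOR.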


def makeExposureRecordFromObsInfo(obsInfo, universe):
    """Construct an exposure DimensionRecord from
    `astro_metadata_translator.ObservationInfo`.

    Parameters
    ----------
    obsInfo : `astro_metadata_translator.ObservationInfo`
        A `~astro_metadata_translator.ObservationInfo` object corresponding to
        the exposure.
    universe : `DimensionUniverse`
        Set of all known dimensions.

    Returns
    -------
    record : `DimensionRecord`
        A record containing exposure metadata, suitable for insertion into
        a `Registry`.
    """
    dimension = universe["exposure"]
    return dimension.RecordClass.fromDict({
        "instrument": obsInfo.instrument,
        "id": obsInfo.exposure_id,
        "name": obsInfo.observation_id,
        "group_name": obsInfo.exposure_group,
        "group_id": obsInfo.visit_id,
        "datetime_begin": obsInfo.datetime_begin,
        "datetime_end": obsInfo.datetime_end,
        "exposure_time": obsInfo.exposure_time.to_value("s"),
        "dark_time": obsInfo.dark_time.to_value("s"),
        "observation_type": obsInfo.observation_type,
        "physical_filter": obsInfo.physical_filter,
    })
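
# Example (a hedged sketch; the header source is hypothetical):
#
#     from astro_metadata_translator import ObservationInfo
#     obsInfo = ObservationInfo(header)  # header: raw FITS metadata
#     record = makeExposureRecordFromObsInfo(obsInfo, butler.registry.dimensions)
#     butler.registry.insertDimensionData("exposure", record)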


def addUnboundedCalibrationLabel(registry, instrumentName):
    """Add a special 'unbounded' calibration_label dimension entry for the
    given instrument that is valid for any exposure.

    If such an entry already exists, this function just returns a `DataId`
    for the existing entry.

    Parameters
    ----------
    registry : `Registry`
        Registry object in which to insert the dimension entry.
    instrumentName : `str`
        Name of the instrument this calibration label is associated with.

    Returns
    -------
    dataId : `DataId`
        New or existing data ID for the unbounded calibration.
    """
    d = dict(instrument=instrumentName, calibration_label="unbounded")
    try:
        return registry.expandDataId(d)
    except LookupError:
        pass
    entry = d.copy()
    entry["datetime_begin"] = TIMESPAN_MIN
    entry["datetime_end"] = TIMESPAN_MAX
    registry.insertDimensionData("calibration_label", entry)
    return registry.expandDataId(d)
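
# Example (a hedged sketch mirroring ``Instrument.writeCameraGeom`` above;
# the instrument name is hypothetical):
#
#     dataId = addUnboundedCalibrationLabel(butler.registry, "ExampleCam")
#     butler.put(camera, "camera", dataId)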


def loadCamera(butler: Butler, dataId: DataId, *, collections: Any = None) -> Tuple[Camera, bool]:
    """Attempt to load versioned camera geometry from a butler, but fall back
    to obtaining a nominal camera from the `Instrument` class if that fails.

    Parameters
    ----------
    butler : `lsst.daf.butler.Butler`
        Butler instance to attempt to query for and load a ``camera`` dataset
        from.
    dataId : `dict` or `DataCoordinate`
        Data ID that identifies at least the ``instrument`` and ``exposure``
        dimensions.
    collections : Any, optional
        Collections to be searched, overriding ``butler.collections``.
        Can be any of the types supported by the ``collections`` argument
        to butler construction.

    Returns
    -------
    camera : `lsst.afw.cameraGeom.Camera`
        Camera object.
    versioned : `bool`
        If `True`, the camera was obtained from the butler and should
        represent a versioned camera from a calibration repository. If
        `False`, no camera datasets were found, and the returned camera was
        produced by instantiating the appropriate `Instrument` class and
        calling `Instrument.getCamera`.
    """
    if collections is None:
        collections = butler.collections
    # Registry would do data ID expansion internally if we didn't do it first,
    # but we might want an expanded data ID ourselves later, so we do it here
    # to ensure it only happens once.
    # This will also catch problems with the data ID not having keys we need.
    dataId = butler.registry.expandDataId(dataId, graph=butler.registry.dimensions["exposure"].graph)
    cameraRefs = list(butler.registry.queryDatasets("camera", dataId=dataId, collections=collections,
                                                    deduplicate=True))
    if cameraRefs:
        assert len(cameraRefs) == 1, "Should be guaranteed by deduplicate=True above."
        return butler.getDirect(cameraRefs[0]), True
    instrument = Instrument.fromName(dataId["instrument"], butler.registry)
    return instrument.getCamera(), False
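
# Example use (a hedged sketch; the repository path, collection name, and
# data ID values are hypothetical):
#
#     butler = Butler("/path/to/repo", collections=["ExampleCam/calib"])
#     camera, versioned = loadCamera(butler, {"instrument": "ExampleCam",
#                                             "exposure": 12345})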