
# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

from abc import ABCMeta, abstractmethod
from collections import defaultdict
import dataclasses
from typing import Any, Dict, Iterable, List, Optional, Tuple
from multiprocessing import Pool

from lsst.daf.butler import (
    Butler,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Timespan,
)

import lsst.geom
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d
from ._instrument import loadCamera, Instrument


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """


@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit.
    """

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value. They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
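
    Examples
    --------
    Concrete implementations are registered with `registerConfigurable`, as in
    this minimal sketch (the class name, registry key, and visit-system
    identifiers below are only illustrative)::

        @registerConfigurable("my-grouping", GroupExposuresTask.registry)
        class MyGroupExposuresTask(GroupExposuresTask):
            ConfigClass = GroupExposuresConfig

            def group(self, exposures):
                # One visit per exposure, reusing the exposure identifiers.
                for exposure in exposures:
                    yield VisitDefinitionData(instrument=exposure.instrument,
                                              id=exposure.id, name=exposure.name,
                                              exposures=[exposure])

            def getVisitSystem(self):
                return (42, "my-grouping")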

    """
    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them. This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()


class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=0,
        doc=("Pad raw image bounding boxes with specified number of pixels "
             "when calculating their (conservatively large) region on the "
             "sky."),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
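
    Examples
    --------
    Concrete implementations are registered with `registerConfigurable` and
    must implement `compute`; a minimal sketch (the registry key and the
    deliberately crude one-octant region are only illustrative)::

        from lsst.sphgeom import ConvexPolygon, UnitVector3d

        @registerConfigurable("one-octant", ComputeVisitRegionsTask.registry)
        class OneOctantComputeVisitRegionsTask(ComputeVisitRegionsTask):
            ConfigClass = ComputeVisitRegionsConfig

            def compute(self, visit, *, collections=None):
                # Use one octant of the sky for the visit and every detector.
                region = ConvexPolygon([UnitVector3d.X(), UnitVector3d.Y(),
                                        UnitVector3d.Z()])
                camera = self.getInstrument(visit.instrument).getCamera()
                return region, {detector.getId(): region for detector in camera}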

    """
    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=("Registry of algorithms for computing on-sky regions for visits "
             "and visit+detector combinations."),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=("If True, silently ignore input exposures that do not have "
             "observation_type=SCIENCE. If False, raise an exception if one "
             "is encountered."),
        dtype=bool,
        optional=False,
        default=True,
    )


class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance. Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively. The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors. Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used. For the default configuration,
    a single raw for each exposure is sufficient.
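
    Examples
    --------
    A minimal sketch of typical usage; the repository path, collection name,
    instrument name, and exposure ID are placeholders, and the configuration
    overrides shown are optional::

        from lsst.daf.butler import Butler

        butler = Butler("/path/to/repo", writeable=True)
        config = DefineVisitsTask.ConfigClass()
        config.groupExposures.name = "by-group-metadata"
        config.computeVisitRegions["single-raw-wcs"].padding = 50
        task = DefineVisitsTask(config=config, butler=butler)
        task.run([{"instrument": "MyCam", "exposure": 12345}],
                 collections="MyCam/raw/all")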

    """
    def __init__(self, config: Optional[DefineVisitsConfig] = None, *, butler: Butler, **kwargs: Any):
        config.validate()  # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

    ConfigClass = DefineVisitsConfig

    _DefaultName = "defineVisits"

    def _buildVisitRecords(self, definition: VisitDefinitionData, *,
                           collections: Any = None) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """

        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(definition,
                                                                              collections=collections)
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(sum, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(lambda a, b: a if a == b else None,
                                    (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.science_program for e in definition.exposures))

        # Use the mean zenith angle as an approximation.
        zenith_angle = _reduceOrNone(sum, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ]
        )


    def run(self, dataIds: Iterable[DataId], *,
            pool: Optional[Pool] = None,
            processes: int = 1,
            collections: Optional[str] = None):
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs. These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        pool : `multiprocessing.Pool`, optional
            If not `None`, a process pool with which to parallelize some
            operations.
        processes : `int`, optional
            The number of processes to use. Ignored if ``pool`` is not `None`.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.
        """
        # Set up multiprocessing, if desired.
        if pool is None and processes > 1:
            pool = Pool(processes)
        mapFunc = map if pool is None else pool.imap_unordered
        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        dataIds = set(mapFunc(lambda d: self.butler.registry.expandDataId(d, graph=dimensions), dataIds))
        if not dataIds:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in dataIds:
            record = dataId.records["exposure"]
            if record.observation_type != "science":
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(f"Input exposure {dataId} has observation_type "
                                       f"{record.observation_type}, not 'science'.")
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No science exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                f"All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        instrument, = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system",
            {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Compute regions and build DimensionRecords for each visit.
        # This is the only parallel step, but it _should_ be the most expensive
        # one (unless DB operations are slow).
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        allRecords = mapFunc(lambda d: self._buildVisitRecords(d, collections=collections), definitions)
        # Iterate over visits and insert dimension data, one transaction per
        # visit.
        for visitRecords in allRecords:
            with self.butler.registry.transaction():
                self.butler.registry.insertDimensionData("visit", visitRecords.visit)
                self.butler.registry.insertDimensionData("visit_definition",
                                                         *visitRecords.visit_definition)
                self.butler.registry.insertDimensionData("visit_detector_region",
                                                         *visitRecords.visit_detector_region)


def _reduceOrNone(func, iterable):
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
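
    Examples
    --------
    Illustrative values only; the reduction short-circuits to `None` as soon
    as it sees a `None` element:

    >>> _reduceOrNone(max, [3, 1, 2])
    3
    >>> _reduceOrNone(max, [3, None, 2]) is None
    True
    >>> _reduceOrNone(max, []) is None
    True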

    """
    r = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r


class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.name,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(e.group_id == visitId for e in exposuresInGroup), \
                "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(instrument=instrument, id=visitId, name=visitName,
                                      exposures=exposuresInGroup)

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=("If True, merge per-detector regions over all exposures in a "
             "visit (via convex hull) instead of using the first exposure and "
             "assuming its regions are valid for all others."),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=("Load the WCS for the detector with this ID. If None, use an "
             "arbitrary detector (the first found in a query of the data "
             "repository for each exposure, or all exposures if "
             "mergeExposures is True)."),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=("If True, raise LookupError if versioned camera geometry cannot "
             "be loaded for an exposure. If False, use the nominal camera "
             "from the Instrument class instead."),
        dtype=bool,
        optional=False,
        default=False,
    )


@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to. If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
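
    Examples
    --------
    A sketch of pinning the detector used for the WCS through the parent
    task's configuration (the detector ID here is only a placeholder)::

        config = DefineVisitsTask.ConfigClass()
        config.computeVisitRegions.name = "single-raw-wcs"
        config.computeVisitRegions["single-raw-wcs"].detectorId = 42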

    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(self, exposure: DimensionRecord, *, collections: Any = None
                              ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry.
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                                          lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees))
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS.
            # This allows flips to be taken into account.
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)

        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets("raw.wcs", dataId=exposure.dataId,
                                                                 collections=collections)
                if not wcsRefsIter:
                    raise LookupError(f"No raw.wcs datasets found for data ID {exposure.dataId} "
                                      f"in collections {collections}.")
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get("raw.wcs", dataId=exposure.dataId, detector=self.config.detectorId,
                                      collections=collections)
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds


    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions