# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

from abc import ABCMeta, abstractmethod
from collections import defaultdict
import itertools
import dataclasses
from typing import Any, Dict, Iterable, List, Optional, Tuple
from multiprocessing import Pool

from lsst.daf.butler import (
    Butler,
    DataCoordinate,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Progress,
    Timespan,
)

import lsst.geom
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d
from ._instrument import loadCamera, Instrument


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """
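

# Illustrative sketch (not part of the original module): a grouping algorithm
# builds one `VisitDefinitionData` per visit from 'exposure' dimension
# records.  The helper below is hypothetical; ``exposure_records`` stands in
# for records obtained from a butler registry query.
def _example_visit_definition(exposure_records: List[DimensionRecord]) -> VisitDefinitionData:
    # Reuse the first exposure's integer ID and observation ID for the visit,
    # much as the default one-to-one grouping algorithm below does.
    first = exposure_records[0]
    return VisitDefinitionData(
        instrument=first.instrument,
        id=first.id,
        name=first.obs_id,
        exposures=list(exposure_records),
    )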



@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit.
    """

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value.  They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them.  This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()
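

# Illustrative sketch (not part of the original module): a new grouping
# algorithm subclasses `GroupExposuresTask` and registers itself so that
# `DefineVisitsConfig` can select it by name.  The registry key
# "example-by-target" and the visit-system values here are hypothetical.
@registerConfigurable("example-by-target", GroupExposuresTask.registry)
class _ExampleGroupByTargetTask(GroupExposuresTask):
    """Group exposures sharing a target_name into one visit (sketch only)."""

    ConfigClass = GroupExposuresConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        byTarget = defaultdict(list)
        for exposure in exposures:
            byTarget[exposure.target_name].append(exposure)
        for records in byTarget.values():
            first = records[0]
            # A real implementation must guarantee IDs and names that are
            # unique across all visit systems for the instrument.
            yield VisitDefinitionData(instrument=first.instrument, id=first.id,
                                      name=first.obs_id, exposures=records)

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (99, "example-by-target")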



class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=0,
        doc=("Pad raw image bounding boxes with specified number of pixels "
             "when calculating their (conservatively large) region on the "
             "sky."),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=("Registry of algorithms for computing on-sky regions for visits "
             "and visit+detector combinations."),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=("If True, silently ignore input exposures that do not have "
             "observation_type=SCIENCE.  If False, raise an exception if one "
             "is encountered."),
        dtype=bool,
        optional=False,
        default=True,
    )
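

# Illustrative sketch (not part of the original module): the two registry
# fields above select their algorithm by name, and per-algorithm options are
# reached by indexing the field with that name.  The values chosen here are
# hypothetical.
def _example_configure() -> DefineVisitsConfig:
    config = DefineVisitsConfig()
    # Select the grouping algorithm registered as "by-group-metadata".
    config.groupExposures.name = "by-group-metadata"
    # Tune the default "single-raw-wcs" region algorithm: pad detector
    # bounding boxes by 50 pixels and merge regions over all exposures.
    config.computeVisitRegions["single-raw-wcs"].padding = 50
    config.computeVisitRegions["single-raw-wcs"].mergeExposures = True
    config.validate()
    return config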



class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance.  Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively.  The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors.  Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used.  For the default configuration,
    a single raw for each exposure is sufficient.

    Defining the same visit the same way multiple times (e.g. via multiple
    invocations of this task on the same exposures, with the same
    configuration) is safe, but it may be inefficient, as most of the work must
    be done before new visits can be compared to existing visits.
    """
    def __init__(self, config: Optional[DefineVisitsConfig] = None, *, butler: Butler, **kwargs: Any):
        config.validate()  # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.progress = Progress("obs.base.DefineVisitsTask")
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

    def _reduce_kwargs(self):
        # Add extra parameters to pickle.
        return dict(**super()._reduce_kwargs(), butler=self.butler)

    ConfigClass = DefineVisitsConfig

    _DefaultName = "defineVisits"

    def _buildVisitRecords(self, definition: VisitDefinitionData, *,
                           collections: Any = None) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(definition,
                                                                             collections=collections)
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(lambda a, b: a + b,
                                      (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(lambda a, b: a if a == b else None,
                                    (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.science_program for e in definition.exposures))

        # The observing day for a visit is defined by the earliest observation
        # of the visit.
        observing_day = _reduceOrNone(min, (e.day_obs for e in definition.exposures))
        observation_reason = _reduceOrNone(lambda a, b: a if a == b else None,
                                           (e.observation_reason for e in definition.exposures))
        if observation_reason is None:
            # Be explicit about there being multiple reasons.
            observation_reason = "various"

        # Use the mean zenith angle as an approximation.
        zenith_angle = _reduceOrNone(lambda a, b: a + b,
                                     (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                day_obs=observing_day,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here.  But there are many other columns that
                # both dimensions should probably have as well.
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ]
        )

    def _expandExposureId(self, dataId: DataId) -> DataCoordinate:
        """Return the expanded version of an exposure ID.

        A private method to allow ID expansion in a pool without resorting
        to local callables.

        Parameters
        ----------
        dataId : `dict` or `DataCoordinate`
            Exposure-level data ID.

        Returns
        -------
        expanded : `DataCoordinate`
            A data ID that includes full metadata for all exposure dimensions.
        """
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        return self.butler.registry.expandDataId(dataId, graph=dimensions)

    def _buildVisitRecordsSingle(self, args) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit and collection.

        A wrapper for `_buildVisitRecords` to allow it to be run as part of
        a pool without resorting to local callables.

        Parameters
        ----------
        args : `tuple` [ `VisitDefinitionData`, `Any` ]
            A tuple consisting of the ``definition`` and ``collections``
            arguments to `_buildVisitRecords`, in that order.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        return self._buildVisitRecords(args[0], collections=args[1])

    def run(self, dataIds: Iterable[DataId], *,
            pool: Optional[Pool] = None,
            processes: int = 1,
            collections: Optional[str] = None):
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs.  These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        pool : `multiprocessing.Pool`, optional
            If not `None`, a process pool with which to parallelize some
            operations.
        processes : `int`, optional
            The number of processes to use.  Ignored if ``pool`` is not `None`.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if a visit ID conflict is detected and the existing visit
            differs from the new one.
        """
        # Set up multiprocessing, if desired.
        if pool is None and processes > 1:
            pool = Pool(processes)
        mapFunc = map if pool is None else pool.imap_unordered
        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dataIds = set(mapFunc(self._expandExposureId, dataIds))
        if not dataIds:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in dataIds:
            record = dataId.records["exposure"]
            if record.observation_type != "science":
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(f"Input exposure {dataId} has observation_type "
                                       f"{record.observation_type}, not 'science'.")
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No science exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                f"All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        instrument, = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system",
            {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Compute regions and build DimensionRecords for each visit.
        # This is the only parallel step, but it _should_ be the most expensive
        # one (unless DB operations are slow).
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        allRecords = mapFunc(self._buildVisitRecordsSingle,
                             zip(definitions, itertools.repeat(collections)))
        # Iterate over visits and insert dimension data, one transaction per
        # visit.  If a visit already exists, we skip all other inserts.
        for visitRecords in self.progress.wrap(allRecords, total=len(definitions),
                                               desc="Computing regions and inserting visits"):
            with self.butler.registry.transaction():
                if self.butler.registry.syncDimensionData("visit", visitRecords.visit):
                    self.butler.registry.insertDimensionData("visit_definition",
                                                             *visitRecords.visit_definition)
                    self.butler.registry.insertDimensionData("visit_detector_region",
                                                             *visitRecords.visit_detector_region)
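

# Illustrative sketch (not part of the original module): typical driver code
# for this task.  The repository path "/repo" and the collection
# "MyCam/raw/all" are hypothetical.
def _example_run_define_visits() -> None:
    butler = Butler("/repo", writeable=True)
    task = DefineVisitsTask(config=DefineVisitsConfig(), butler=butler)
    # Find exposure-level data IDs for previously ingested raws and define
    # one visit per exposure (the default grouping).
    dataIds = butler.registry.queryDataIds(
        ["exposure"], datasets="raw", collections="MyCam/raw/all"
    )
    task.run(dataIds, collections="MyCam/raw/all")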



def _reduceOrNone(func, iterable):
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
    """
    r = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r
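

# Illustrative sketch (not part of the original module) of how _reduceOrNone
# behaves when aggregating exposure metadata as done in _buildVisitRecords.
def _example_reduce_or_none() -> None:
    def same(a, b):
        return a if a == b else None

    assert _reduceOrNone(same, ["r", "r", "r"]) == "r"   # consistent values survive
    assert _reduceOrNone(same, ["r", "g"]) is None       # mixed values collapse to None
    assert _reduceOrNone(min, [3, 1, 2]) == 1            # e.g. earliest day_obs
    assert _reduceOrNone(min, [3, None, 2]) is None      # any None poisons the result
    assert _reduceOrNone(min, []) is None                # empty input gives None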



class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.obs_id,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(e.group_id == visitId for e in exposuresInGroup), \
                "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(instrument=instrument, id=visitId, name=visitName,
                                      exposures=exposuresInGroup)

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=("If True, merge per-detector regions over all exposures in a "
             "visit (via convex hull) instead of using the first exposure and "
             "assuming its regions are valid for all others."),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=("Load the WCS for the detector with this ID.  If None, use an "
             "arbitrary detector (the first found in a query of the data "
             "repository for each exposure, or all exposures if "
             "mergeExposures is True)."),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=("If True, raise LookupError if versioned camera geometry cannot "
             "be loaded for an exposure.  If False, use the nominal camera "
             "from the Instrument class instead."),
        dtype=bool,
        optional=False,
        default=False,
    )


@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to.  If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(self, exposure: DimensionRecord, *, collections: Any = None
                              ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry.
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                                          lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees))
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS.
            # This allows flips to be taken into account.
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)
        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets("raw.wcs", dataId=exposure.dataId,
                                                                 collections=collections)
                if not wcsRefsIter:
                    raise LookupError(f"No raw.wcs datasets found for data ID {exposure.dataId} "
                                      f"in collections {collections}.")
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get("raw.wcs", dataId=exposure.dataId, detector=self.config.detectorId,
                                      collections=collections)
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds

    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions

793 detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds) 

794 visitBounds.extend(bounds) 

795 return ConvexPolygon.convexHull(visitBounds), detectorRegions