# Coverage for python/lsst/obs/base/defineVisits.py: 35%


# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

import dataclasses
import operator
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from typing import Any, Callable, ClassVar, Dict, Iterable, List, Optional, Set, Tuple, TypeVar

import lsst.geom
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.daf.butler import (
    Butler,
    DataCoordinate,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Progress,
    Timespan,
)
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.pipe.base import Instrument, Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d
from lsst.utils.introspection import get_full_type_name

from ._instrument import loadCamera


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """


@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit."""

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value. They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """

    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them. This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()

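# Illustrative sketch (not part of the original module): the registration
# pattern described in the GroupExposuresTask docstring. A concrete grouping
# algorithm pairs a GroupExposuresConfig subclass with a task registered under
# a string name; the _GroupExposuresOneToOneTask implementation later in this
# file follows the same pattern. The "my-grouping" name and _MyGrouping*
# classes below are hypothetical placeholders.
#
#     class _MyGroupingConfig(GroupExposuresConfig):
#         pass
#
#     @registerConfigurable("my-grouping", GroupExposuresTask.registry)
#     class _MyGroupingTask(GroupExposuresTask):
#         ConfigClass = _MyGroupingConfig
#
#         def group(self, exposures):
#             ...  # yield VisitDefinitionData instances
#
#         def getVisitSystem(self):
#             return (2, "my-grouping")
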

class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=250,
        doc=(
            "Pad raw image bounding boxes with specified number of pixels "
            "when calculating their (conservatively large) region on the "
            "sky. Note that the config value for pixelMargin of the "
            "reference object loaders in meas_algorithms should be <= "
            "the value set here."
        ),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """

    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap: Dict[str, Instrument] = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=(
            "Registry of algorithms for computing on-sky regions for visits "
            "and visit+detector combinations."
        ),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName: str) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(
        self, visit: VisitDefinitionData, *, collections: Any = None
    ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=(
            "If True, silently ignore input exposures that do not have "
            "observation_type=SCIENCE. If False, raise an exception if one "
            "is encountered."
        ),
        dtype=bool,
        optional=False,
        default=True,
    )

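# Illustrative sketch (not part of the original module): both registry fields
# above select their subtask by string name, so a non-default grouping
# algorithm or region padding can be chosen through configuration. The exact
# override syntax below follows common lsst.pex.config registry-field usage
# and is shown only as an assumed example.
#
#     config = DefineVisitsConfig()
#     config.groupExposures.name = "by-group-metadata"
#     config.computeVisitRegions["single-raw-wcs"].padding = 500
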

class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance. Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively. The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors. Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used. For the default configuration,
    a single raw for each exposure is sufficient.

    Defining the same visit the same way multiple times (e.g. via multiple
    invocations of this task on the same exposures, with the same
    configuration) is safe, but it may be inefficient, as most of the work must
    be done before new visits can be compared to existing visits.
    """


    def __init__(self, config: DefineVisitsConfig, *, butler: Butler, **kwargs: Any):
        config.validate()  # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.progress = Progress("obs.base.DefineVisitsTask")
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

    def _reduce_kwargs(self) -> dict:
        # Add extra parameters to pickle
        return dict(**super()._reduce_kwargs(), butler=self.butler)

    ConfigClass: ClassVar[Config] = DefineVisitsConfig

    _DefaultName: ClassVar[str] = "defineVisits"

    groupExposures: GroupExposuresTask
    computeVisitRegions: ComputeVisitRegionsTask


    def _buildVisitRecords(
        self, definition: VisitDefinitionData, *, collections: Any = None
    ) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """

        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(
            definition, collections=collections
        )
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(operator.add, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(_value_if_equal, (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(_value_if_equal, (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(_value_if_equal, (e.science_program for e in definition.exposures))

        # observing day for a visit is defined by the earliest observation
        # of the visit
        observing_day = _reduceOrNone(min, (e.day_obs for e in definition.exposures))
        observation_reason = _reduceOrNone(
            _value_if_equal, (e.observation_reason for e in definition.exposures)
        )
        if observation_reason is None:
            # Be explicit about there being multiple reasons
            # MyPy can't really handle DimensionRecord fields as
            # DimensionRecord classes are dynamically defined; easiest to just
            # shush it when it complains.
            observation_reason = "various"  # type: ignore

        # Use the mean zenith angle as an approximation
        zenith_angle = _reduceOrNone(operator.add, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                day_obs=observing_day,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ],
        )


    def run(
        self,
        dataIds: Iterable[DataId],
        *,
        collections: Optional[str] = None,
        update_records: bool = False,
    ) -> None:
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs. These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.
        update_records : `bool`, optional
            If `True` (`False` is default), update existing visit records that
            conflict with the new ones instead of rejecting them (and when this
            occurs, update visit_detector_region as well). THIS IS AN ADVANCED
            OPTION THAT SHOULD ONLY BE USED TO FIX REGIONS AND/OR METADATA THAT
            ARE KNOWN TO BE BAD, AND IT CANNOT BE USED TO REMOVE EXPOSURES OR
            DETECTORS FROM A VISIT.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if a visit ID conflict is detected and the existing visit
            differs from the new one.
        """

        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        data_id_set: Set[DataCoordinate] = {
            self.butler.registry.expandDataId(d, graph=dimensions) for d in dataIds
        }
        if not data_id_set:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in data_id_set:
            record = dataId.records["exposure"]
            assert record is not None, "Guaranteed by expandDataIds call earlier."
            if record.tracking_ra is None or record.tracking_dec is None or record.sky_angle is None:
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(
                        f"Input exposure {dataId} has observation_type "
                        f"{record.observation_type}, but is not on sky."
                    )
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No on-sky exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                f"All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        (instrument,) = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system", {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Iterate over visits, compute regions, and insert dimension data, one
        # transaction per visit. If a visit already exists, we skip all other
        # inserts.
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        for visitDefinition in self.progress.wrap(
            definitions, total=len(definitions), desc="Computing regions and inserting visits"
        ):
            visitRecords = self._buildVisitRecords(visitDefinition, collections=collections)
            with self.butler.registry.transaction():
                inserted_or_updated = self.butler.registry.syncDimensionData(
                    "visit",
                    visitRecords.visit,
                    update=update_records,
                )
                if inserted_or_updated:
                    if inserted_or_updated is True:
                        # This is a new visit, not an update to an existing
                        # one, so insert visit definition.
                        # We don't allow visit definitions to change even when
                        # asked to update, because we'd have to delete the old
                        # visit_definitions first and also worry about what
                        # this does to datasets that already use the visit.
                        self.butler.registry.insertDimensionData(
                            "visit_definition", *visitRecords.visit_definition
                        )
                    # [Re]Insert visit_detector_region records for both inserts
                    # and updates, because we do allow updating to affect the
                    # region calculations.
                    self.butler.registry.insertDimensionData(
                        "visit_detector_region", *visitRecords.visit_detector_region, replace=update_records
                    )

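# Illustrative usage sketch (not part of the original module), assuming a
# writeable repository at "REPO_PATH" with ingested raws; the repository path
# and the "HSC" instrument name are placeholders.
#
#     from lsst.daf.butler import Butler
#     from lsst.obs.base import DefineVisitsTask
#
#     butler = Butler("REPO_PATH", writeable=True)
#     task = DefineVisitsTask(config=DefineVisitsTask.ConfigClass(), butler=butler)
#     data_ids = butler.registry.queryDataIds(["exposure"], instrument="HSC")
#     task.run(data_ids)
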

_T = TypeVar("_T")


def _reduceOrNone(func: Callable[[_T, _T], Optional[_T]], iterable: Iterable[Optional[_T]]) -> Optional[_T]:
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
    """
    r: Optional[_T] = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r


def _value_if_equal(a: _T, b: _T) -> Optional[_T]:
    """Return either argument if they are equal, or `None` if they are not."""
    return a if a == b else None

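# Illustrative only (not part of the original module): how the helpers above
# collapse per-exposure values into a single visit-level value in
# DefineVisitsTask._buildVisitRecords.
#
#     _reduceOrNone(operator.add, [30.0, 30.0, 30.0])   -> 90.0  (summed exposure time)
#     _reduceOrNone(_value_if_equal, ["r", "r", "r"])   -> "r"   (consistent filter)
#     _reduceOrNone(_value_if_equal, ["r", "i"])        -> None  (inconsistent values)
#     _reduceOrNone(min, [])                            -> None  (no exposures)
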

class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.obs_id,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(
                e.group_id == visitId for e in exposuresInGroup
            ), "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(
                instrument=instrument, id=visitId, name=visitName, exposures=exposuresInGroup
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=(
            "If True, merge per-detector regions over all exposures in a "
            "visit (via convex hull) instead of using the first exposure and "
            "assuming its regions are valid for all others."
        ),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=(
            "Load the WCS for the detector with this ID. If None, use an "
            "arbitrary detector (the first found in a query of the data "
            "repository for each exposure, or all exposures if "
            "mergeExposures is True)."
        ),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=(
            "If True, raise LookupError if versioned camera geometry cannot "
            "be loaded for an exposure. If False, use the nominal camera from "
            "the Instrument class instead."
        ),
        dtype=bool,
        optional=False,
        default=False,
    )


@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to. If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(
        self, exposure: DimensionRecord, *, collections: Any = None
    ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(
                lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees),
            )
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS
            # This allows flips to be taken into account
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})

            try:
                wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)  # type: ignore
            except AttributeError:
                raise TypeError(
                    f"Raw formatter is {get_full_type_name(rawFormatter)} but visit"
                    " definition requires it to support 'makeRawSkyWcsFromBoresight'"
                ) from None
        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets(
                    "raw.wcs", dataId=exposure.dataId, collections=collections
                )
                if not wcsRefsIter:
                    raise LookupError(
                        f"No raw.wcs datasets found for data ID {exposure.dataId} "
                        f"in collections {collections}."
                    )
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get(
                    "raw.wcs",
                    dataId=exposure.dataId,
                    detector=self.config.detectorId,
                    collections=collections,
                )
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds

    def compute(
        self, visit: VisitDefinitionData, *, collections: Any = None
    ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds: Dict[int, List[UnitVector3d]] = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions
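
# Illustrative only (not part of the original module): each per-detector
# region above is the spherical convex hull of that detector's padded corner
# vectors, and the visit region is the hull of all detectors' corners pooled
# together. The vectors below are arbitrary placeholders.
#
#     from lsst.sphgeom import ConvexPolygon, UnitVector3d
#
#     corners = [
#         UnitVector3d(1.0, 0.0, 0.0),
#         UnitVector3d(0.0, 1.0, 0.0),
#         UnitVector3d(0.0, 0.0, 1.0),
#     ]
#     detector_region = ConvexPolygon.convexHull(corners)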