Coverage for python / lsst / obs / base / defineVisits.py: 24%

437 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-14 23:50 +0000

1# This file is part of obs_base. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (https://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

22from __future__ import annotations 

23 

# Public API of this module: the visit-definition driver task, its pluggable
# subtask base classes, their configs, and the supporting data structures.
__all__ = [
    "ComputeVisitRegionsConfig",
    "ComputeVisitRegionsTask",
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
    "VisitSystem",
]

34 

35import cmath 

36import dataclasses 

37import enum 

38import math 

39import operator 

40from abc import ABCMeta, abstractmethod 

41from collections import defaultdict 

42from collections.abc import Callable, Iterable, Sequence 

43from typing import Any, ClassVar, TypeVar, cast 

44 

45import lsst.geom 

46from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS 

47from lsst.daf.butler import Butler, DataId, DimensionRecord, Progress, Timespan 

48from lsst.daf.butler.registry import ConflictingDefinitionError 

49from lsst.geom import Box2D 

50from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable 

51from lsst.pipe.base import Struct, Task 

52from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d 

53from lsst.utils.introspection import get_full_type_name 

54 

55from ._instrument import Instrument, loadCamera 

56 

57 

class VisitSystem(enum.Enum):
    """Enumeration labeling the different ways exposures can be assigned
    to visits.
    """

    ONE_TO_ONE = 0
    """Each exposure is assigned to its own visit."""

    BY_GROUP_METADATA = 1
    """Visit membership is defined by the value of the group dimension or, for
    older dimension universes, exposure.group_id."""

    BY_SEQ_START_END = 2
    """Visit membership is defined by the values of the ``exposure.day_obs``,
    ``exposure.seq_start``, and ``exposure.seq_end`` values.
    """

    @classmethod
    def all(cls) -> frozenset[VisitSystem]:
        """Return a `frozenset` containing all members."""
        return frozenset(cls.__members__.values())

    @classmethod
    def from_name(cls, external_name: str) -> VisitSystem:
        """Construct the enumeration from given name."""
        # Accept case-insensitive names with either dashes or underscores.
        key = external_name.upper().replace("-", "_")
        try:
            return cls.__members__[key]
        except KeyError:
            raise KeyError(f"Visit system named '{external_name}' not known.") from None

    @classmethod
    def from_names(cls, names: Iterable[str] | None) -> frozenset[VisitSystem]:
        """Return a `frozenset` of all the visit systems matching the supplied
        names.

        Parameters
        ----------
        names : iterable of `str`, or `None`
            Names of visit systems. Case insensitive. If `None` or empty, all
            the visit systems are returned.

        Returns
        -------
        systems : `frozenset` of `VisitSystem`
            The matching visit systems.
        """
        if names:
            return frozenset(cls.from_name(name) for name in names)
        return cls.all()

    def __str__(self) -> str:
        # External form is lower case with dashes (inverse of from_name).
        return self.name.lower().replace("_", "-")

113 

114 

@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.

    Produced by `GroupExposuresTask.group` and consumed by
    `ComputeVisitRegionsTask.compute` and `DefineVisitsTask._buildVisitRecords`.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    visit_systems: set[VisitSystem]
    """All the visit systems associated with this visit."""

    exposures: list[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """

143 

144 

@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit.

    Built by `DefineVisitsTask._buildVisitRecords` and inserted into the
    registry by `DefineVisitsTask.run`.
    """

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: list[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: list[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """

    visit_system_membership: list[DimensionRecord]
    """Records relating visits to an associated visit system."""

164 

165 

class GroupExposuresConfig(Config):
    """Configuration for `GroupExposuresTask`.

    This base class declares no options of its own; concrete grouping
    algorithms registered with `GroupExposuresTask.registry` may subclass it
    to add their own.
    """

168 

169 

class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the `DefineVisitsTask` subtask that assembles
    exposures into visits.

    Concrete subclasses should be registered with
    `GroupExposuresTask.registry` so that `DefineVisitsTask` can use them,
    and should generally correspond to a particular 'visit_system' dimension
    value.  They are also responsible for producing visit IDs and names that
    are unique across every visit system an instrument uses.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `lsst.pipe.base.Task`
        constructor.
    """

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    @abstractmethod
    def find_missing(
        self, exposures: list[DimensionRecord], registry: lsst.daf.butler.Registry
    ) -> list[DimensionRecord]:
        """Report exposures that appear to be missing, where possible.

        Parameters
        ----------
        exposures : `list` of `lsst.daf.butler.DimensionRecord`
            The exposure records to analyze.
        registry : `lsst.daf.butler.Registry`
            A butler registry that contains these exposure records.

        Returns
        -------
        missing : `list` of `lsst.daf.butler.DimensionRecord`
            Exposure records present in the registry that are related to the
            given exposures, were absent from the input list, and were deemed
            to be relevant.

        Notes
        -----
        Not every grouping scheme is able to detect missing exposures;
        returning an empty list is acceptable.
        """
        raise NotImplementedError()

    @abstractmethod
    def group_exposures(self, exposures: list[DimensionRecord]) -> dict[Any, list[DimensionRecord]]:
        """Partition the exposures in the way most natural for this visit
        definition.

        Parameters
        ----------
        exposures : `list` of `lsst.daf.butler.DimensionRecord`
            The exposure records to group.

        Returns
        -------
        groups : `dict` [Any, `list` [ `lsst.daf.butler.DimensionRecord` ] ]
            Groupings of exposure records.  The key type is specific to the
            particular visit definition and could be a string or a tuple.
        """
        raise NotImplementedError()

    @abstractmethod
    def group(
        self, exposures: list[DimensionRecord], instrument: Instrument
    ) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `lsst.daf.butler.DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.
        instrument : `~lsst.obs.base.Instrument`
            Instrument specification that can be used to optionally support
            some visit ID definitions.

        Returns
        -------
        visits : `~collections.abc.Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them.  May be an iterator or a container.
        """
        raise NotImplementedError()

    def getVisitSystems(self) -> set[VisitSystem]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        visit_systems : `set` [`VisitSystem`]
            The visit systems used by this algorithm.
        """
        # Not abstract, but subclasses are expected to override.
        raise NotImplementedError()

278 

279 

class ComputeVisitRegionsConfig(Config):
    """Configuration for `ComputeVisitRegionsTask`."""

    # How far (in pixels) to grow each detector bounding box before projecting
    # it onto the sky.
    padding: Field[int] = Field(
        dtype=int,
        default=250,
        doc="Pad raw image bounding boxes with specified number of pixels "
        "when calculating their (conservatively large) region on the sky. "
        "Note that the config value for pixelMargin of the reference "
        "object loaders in meas_algorithms should be <= the value set here.",
    )

294 

295 

class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the `DefineVisitsTask` subtask that derives
    spatial regions for visits and visit+detector combinations.

    Concrete subclasses should be registered with
    `ComputeVisitRegionsTask.registry` so that `DefineVisitsTask` can use
    them.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `~lsst.pipe.base.Task`
        constructor.
    """

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc="Registry of algorithms for computing on-sky regions for visits and visit+detector combinations.",
        configBaseType=ComputeVisitRegionsConfig,
    )

    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        # Per-instrument cache used by getInstrument.
        self.instrumentMap: dict[str, Instrument] = {}

    def getInstrument(self, instrumentName: str) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        try:
            return self.instrumentMap[instrumentName]
        except KeyError:
            # First request for this instrument; construct and cache it.
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
            return instrument

    @abstractmethod
    def compute(
        self,
        visit: VisitDefinitionData,
        *,
        collections: Sequence[str] | str | None = None,
    ) -> tuple[Region, dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : `collections.abc.Sequence` [ `str` ] or `str` or `None`
            Collections to be searched for camera geometry, overriding
            ``self.butler.collections.defaults``.  Can be any of the types
            supported by the ``collections`` argument to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()

380 

381 

class DefineVisitsConfig(Config):
    """Configure visit definition.

    Selects the exposure-grouping and region-computation algorithms used by
    `DefineVisitsTask` and controls how non-science exposures and the obscore
    table are handled.
    """

    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one-and-by-counter",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        # Fixed typo in doc string: was "Algorithm from computing".
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures: Field[bool] = Field(
        doc=(
            "If True, silently ignore input exposures that do not have "
            "observation_type=SCIENCE. If False, raise an exception if one "
            "encountered."
        ),
        dtype=bool,
        optional=False,
        default=True,
    )
    updateObsCoreTable: Field[bool] = Field(
        doc=(
            "If True, update exposure regions in obscore table after visits "
            "are defined. If False, do not update obscore table."
        ),
        dtype=bool,
        default=True,
    )

411 

412 

413class DefineVisitsTask(Task): 

414 """Driver Task for defining visits (and their spatial regions) in Gen3 

415 Butler repositories. 

416 

417 Parameters 

418 ---------- 

419 config : `DefineVisitsConfig` 

420 Configuration for the task. 

421 butler : `~lsst.daf.butler.Butler` 

422 Writeable butler instance. Will be used to read ``camera`` datasets 

423 and insert/sync dimension data. 

424 **kwargs 

425 Additional keyword arguments are forwarded to the `lsst.pipe.base.Task` 

426 constructor. 

427 

428 Notes 

429 ----- 

430 Each instance of `DefineVisitsTask` reads from / writes to the same Butler. 

431 Each invocation of `DefineVisitsTask.run` processes an independent group of 

432 exposures into one or more new visits, all belonging to the same visit 

433 system and instrument. 

434 

435 The actual work of grouping exposures and computing regions is delegated to 

436 pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`), 

437 respectively. The defaults are to create one visit for every exposure, and 

438 to use exactly one (arbitrary) detector-level raw dataset's WCS along with 

439 camera geometry to compute regions for all detectors, but the raw WCS is 

440 recomputed from the ``exposure`` dimension record's rotation angle and 

441 boresight rather than by loading the ``raw.wcs`` dataset directly. Other 

442 implementations can be created and configured for instruments for which 

443 these choices are unsuitable (e.g. because visits and exposures are not 

444 one-to-one, or because ``raw.wcs`` datasets for different detectors may not 

445 be consistent with camera geometry). 

446 

447 Defining the same visit the same way multiple times (e.g. via multiple 

448 invocations of this task on the same exposures, with the same 

449 configuration) is safe, but it may be inefficient, as most of the work must 

450 be done before new visits can be compared to existing visits. 

451 """ 

452 

    def __init__(self, config: DefineVisitsConfig, *, butler: Butler, **kwargs: Any):
        # Not a CmdlineTask nor PipelineTask, so have to validate the config
        # here rather than relying on the framework to do it.
        config.validate()
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.dimensions
        self.progress = Progress("obs.base.DefineVisitsTask")
        # Construct the pluggable grouping and region-computation subtasks
        # selected by the config registry fields.
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

461 

462 def _reduce_kwargs(self) -> dict: 

463 # Add extra parameters to pickle 

464 return dict(**super()._reduce_kwargs(), butler=self.butler) 

465 

    # Hooks used by the lsst.pipe.base.Task framework.
    ConfigClass: ClassVar[type[Config]] = DefineVisitsConfig

    _DefaultName: ClassVar[str] = "defineVisits"

    # Instance attribute type declarations; the subtasks are created by the
    # makeSubtask calls in __init__.
    config: DefineVisitsConfig
    groupExposures: GroupExposuresTask
    computeVisitRegions: ComputeVisitRegionsTask

473 

    def _buildVisitRecords(
        self, definition: VisitDefinitionData, *, collections: Sequence[str] | str | None = None
    ) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : `collections.abc.Sequence` [ `str` ] or `str` or `None`
            Collections to be searched for camera geometry, overriding
            ``self.butler.collections.defaults``. Can be any of the types
            supported by the ``collections`` argument to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        dimension = self.universe["visit"]

        # Some registries support additional items.  The set of metadata
        # column names is used below to decide which optional fields (and
        # which visit_system schema) this universe supports.
        supported = {meta.name for meta in dimension.metadata}

        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(
            definition, collections=collections
        )
        # Aggregate other exposure quantities.  _reduceOrNone yields None if
        # any per-exposure value is None (or, with _value_if_equal, if the
        # values disagree).
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(operator.add, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(_value_if_equal, (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(_value_if_equal, (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(_value_if_equal, (e.science_program for e in definition.exposures))

        # observing day for a visit is defined by the earliest observation
        # of the visit
        observing_day = _reduceOrNone(min, (e.day_obs for e in definition.exposures))
        observation_reason = _reduceOrNone(
            _value_if_equal, (e.observation_reason for e in definition.exposures)
        )
        if observation_reason is None:
            # Be explicit about there being multiple reasons
            observation_reason = "various"

        # Use the mean zenith angle as an approximation
        zenith_angle = _reduceOrNone(operator.add, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # New records that may not be supported by older dimension universes.
        extras: dict[str, Any] = {}
        if "seq_num" in supported:
            extras["seq_num"] = _reduceOrNone(min, (e.seq_num for e in definition.exposures))
        if "azimuth" in supported:
            # Must take into account 0/360 problem.
            extras["azimuth"] = _calc_mean_angle([e.azimuth for e in definition.exposures])

        # visit_system handling changed. This is the logic for visit/exposure
        # that has support for seq_start/seq_end.  The presence of "seq_num"
        # in the visit metadata is used as the marker for the newer schema.
        if "seq_num" in supported:
            # Map visit to exposure.
            visit_definition = [
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                )
                for exposure in definition.exposures
            ]

            # Map visit to visit system.  Only systems both implemented by
            # the grouping subtask and listed in the definition are recorded.
            visit_system_membership = []
            for visit_system in self.groupExposures.getVisitSystems():
                if visit_system in definition.visit_systems:
                    record = self.universe["visit_system_membership"].RecordClass(
                        instrument=definition.instrument,
                        visit=definition.id,
                        visit_system=visit_system.value,
                    )
                    visit_system_membership.append(record)

        else:
            # The old approach can only handle one visit system at a time.
            # If we have been configured with multiple options, prefer the
            # one-to-one.
            visit_systems = self.groupExposures.getVisitSystems()
            if len(visit_systems) > 1:
                one_to_one = VisitSystem.from_name("one-to-one")
                if one_to_one not in visit_systems:
                    raise ValueError(
                        f"Multiple visit systems specified ({visit_systems}) for use with old"
                        " dimension universe but unable to find one-to-one."
                    )
                visit_system = one_to_one
            else:
                visit_system = visit_systems.pop()

            # In the old schema visit_system is a column on the visit itself.
            extras["visit_system"] = visit_system.value

            # The old visit_definition included visit system.
            visit_definition = [
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=visit_system.value,
                )
                for exposure in definition.exposures
            ]

            # This concept does not exist in old schema.
            visit_system_membership = []

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=dimension.RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                day_obs=observing_day,
                zenith_angle=zenith_angle,
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
                **extras,
            ),
            visit_definition=visit_definition,
            visit_system_membership=visit_system_membership,
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ],
        )

625 

626 def run( 

627 self, 

628 dataIds_or_records: Iterable[DataId | DimensionRecord], 

629 *, 

630 collections: Sequence[str] | str | None = None, 

631 update_records: bool = False, 

632 incremental: bool = False, 

633 skip_conflicting: bool = False, 

634 ) -> Struct: 

635 """Add visit definitions to the registry for the given exposures. 

636 

637 Parameters 

638 ---------- 

639 dataIds_or_records : `~collections.abc.Iterable` [ `dict` or \ 

640 `~lsst.daf.butler.DataCoordinate` or \ 

641 `~lsst.daf.butler.DimensionRecord` ] 

642 Exposure-level data IDs or explicit exposure records. These must 

643 all correspond to the same instrument, and are expected to be 

644 on-sky science exposures. 

645 collections : `~collections.abc.Sequence` [ `str` ] or `str` or `None` 

646 Collections to be searched for camera geometry, overriding 

647 ``self.butler.collections.defaults``. Can be any of the types 

648 supported by the ``collections`` argument to butler construction. 

649 update_records : `bool`, optional 

650 If `True` (`False` is default), update existing ``visit`` records 

651 and ``visit_detector_region`` records. THIS IS AN ADVANCED OPTION 

652 THAT SHOULD ONLY BE USED TO FIX REGIONS AND/OR METADATA THAT ARE 

653 KNOWN TO BE BAD, AND IT CANNOT BE USED TO REMOVE EXPOSURES OR 

654 DETECTORS FROM A VISIT. 

655 incremental : `bool`, optional 

656 If `True` indicate that exposures are being ingested incrementally 

657 and visit definition will be run on partial visits. This will 

658 allow the ``visit`` record to be updated if it already exists, but 

659 (unlike ``update_records=True``) it will only update the 

660 ``visit_detector_region`` records if the ``visit`` record's region 

661 changes. If there is any risk that files are being ingested 

662 incrementally it is critical that this parameter is set to `True` 

663 and not to rely on ``update_records``. 

664 skip_conflicting : `bool`, optional 

665 If `True` do not raise an error if there is a change in an existing 

666 visit definition. This can be used if you solely want to define 

667 visits that were somehow missed previously. It has no effect if 

668 ``update_records`` is `True` or incremental mode is enabled. 

669 

670 Returns 

671 ------- 

672 result : `lsst.pipe.base.Struct` 

673 Structure with the following attributes (all `int`): 

674 

675 - n_visits: total number of visits defined 

676 - n_skipped: number of visits that were already present left alone 

677 - n_new: number of new visits inserted 

678 - n_fully_updated: number of existing visits fully updated 

679 - n_partially_updated: number of visits with non-geometry updates. 

680 

681 Raises 

682 ------ 

683 lsst.daf.butler.registry.ConflictingDefinitionError 

684 Raised if a visit ID conflict is detected and the existing visit 

685 differs from the new one. 

686 """ 

687 # Normalize, expand, and deduplicate data IDs. 

688 self.log.info("Preprocessing data IDs.") 

689 dimensions = self.universe.conform(["exposure"]) 

690 

691 exposure_records: set[DimensionRecord] = set() 

692 instruments: set[str] = set() 

693 instrument_cls_name: str | None = None 

694 instrument_record: DimensionRecord | None = None 

695 

696 # Go through the supplied dataset extracting records. 

697 # Check that only a single instrument is being used. 

698 for external in dataIds_or_records: 

699 if isinstance(external, DimensionRecord): 

700 record = external 

701 if str(record.definition) != "exposure": 

702 raise ValueError(f"Can only define visits from exposure records, not {record}.") 

703 else: 

704 data_id = self.butler.registry.expandDataId(external, dimensions=dimensions) 

705 exp_record = data_id.records["exposure"] 

706 assert exp_record is not None, "Guaranteed by expandDataIds call earlier." 

707 record = exp_record 

708 instrument_record = data_id.records["instrument"] 

709 

710 # LSSTCam data can assign ra/dec to flats, and dome-closed 

711 # engineering tests. Do not assign a visit if we know that 

712 # can_see_sky is False. Treat None as True for this test. 

713 can_see_sky = getattr(record, "can_see_sky", True) 

714 if ( 

715 record.tracking_ra is None 

716 or record.tracking_dec is None 

717 or record.sky_angle is None 

718 or can_see_sky is False 

719 ): 

720 if self.config.ignoreNonScienceExposures: 

721 continue 

722 else: 

723 raise RuntimeError( 

724 f"Input exposure {external} has observation_type " 

725 f"{record.observation_type}, but is not on sky." 

726 ) 

727 instrument_name = record.instrument 

728 instruments.add(instrument_name) 

729 exposure_records.add(record) 

730 # Downstream APIs expect a list of records, not a set. 

731 exposures = list(exposure_records) 

732 if not exposures: 

733 self.log.info("No on-sky exposures found after filtering.") 

734 return Struct(n_visits=0, n_skipped=0, n_new=0, n_partially_updated=0, n_fully_updated=0) 

735 if len(instruments) > 1: 

736 raise RuntimeError( 

737 "All data IDs passed to DefineVisitsTask.run must be " 

738 f"from the same instrument; got {instruments}." 

739 ) 

740 (instrument,) = instruments 

741 

742 # Might need the instrument class for later depending on universe 

743 # and grouping scheme. 

744 if instrument_cls_name is None: 

745 if instrument_record is None: 

746 # We were given a DimensionRecord instead of a DataCoordinate. 

747 

748 instrument_records = self.butler.query_dimension_records( 

749 "instrument", instrument=instrument, limit=1 

750 ) 

751 if len(instrument_records) != 1: 

752 raise RuntimeError( 

753 f"Instrument {instrument} found in dimension record but unknown to butler." 

754 ) 

755 instrument_record = instrument_records[0] 

756 instrument_cls_name = instrument_record.class_name 

757 assert instrument_cls_name is not None, "Instrument must be defined by this point" 

758 instrument_helper = Instrument.from_string(instrument_cls_name) 

759 

760 # Ensure the visit_system our grouping algorithm uses is in the 

761 # registry, if it wasn't already. 

762 visitSystems = self.groupExposures.getVisitSystems() 

763 for visitSystem in visitSystems: 

764 self.log.info("Registering visit_system %d: %s.", visitSystem.value, visitSystem) 

765 self.butler.registry.syncDimensionData( 

766 "visit_system", 

767 {"instrument": instrument, "id": visitSystem.value, "name": str(visitSystem)}, 

768 ) 

769 

770 # In true incremental we will be given the second snap on its 

771 # own on the assumption that the previous snap was already handled. 

772 # For correct grouping we need access to the other exposures in the 

773 # visit. 

774 if incremental: 

775 exposures.extend(self.groupExposures.find_missing(exposures, self.butler.registry)) 

776 

777 # Group exposures into visits, delegating to subtask. 

778 self.log.info("Grouping %d exposure(s) into visits.", len(exposures)) 

779 definitions = list(self.groupExposures.group(exposures, instrument_helper)) 

780 # Iterate over visits, compute regions, and insert dimension data, one 

781 # transaction per visit. If a visit already exists, we skip all other 

782 # inserts. 

783 self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions)) 

784 n_skipped: int = 0 

785 n_new: int = 0 

786 n_fully_updated: int = 0 

787 n_partially_updated: int = 0 

788 for visitDefinition in self.progress.wrap( 

789 definitions, total=len(definitions), desc="Computing regions and inserting visits" 

790 ): 

791 visitRecords = self._buildVisitRecords(visitDefinition, collections=collections) 

792 with self.butler.registry.transaction(): 

793 try: 

794 inserted_or_updated = self.butler.registry.syncDimensionData( 

795 "visit", 

796 visitRecords.visit, 

797 update=(update_records or incremental), 

798 ) 

799 except ConflictingDefinitionError: 

800 if not skip_conflicting: 

801 raise 

802 inserted_or_updated = False 

803 if inserted_or_updated or update_records: 

804 if inserted_or_updated is True: 

805 # This is a new visit, not an update to an existing 

806 # one, so insert visit definition. 

807 # We don't allow visit definitions to change even when 

808 # asked to update, because we'd have to delete the old 

809 # visit_definitions first and also worry about what 

810 # this does to datasets that already use the visit. 

811 self.butler.registry.insertDimensionData( 

812 "visit_definition", *visitRecords.visit_definition 

813 ) 

814 if visitRecords.visit_system_membership: 

815 self.butler.registry.insertDimensionData( 

816 "visit_system_membership", *visitRecords.visit_system_membership 

817 ) 

818 elif incremental and len(visitRecords.visit_definition) > 1: 

819 # The visit record was modified. This could happen 

820 # if a multi-snap visit was redefined with an 

821 # additional snap so play it safe and allow for the 

822 # visit definition to be updated. We use update=False 

823 # here since there should not be any rows updated, 

824 # just additional rows added. update=True does not work 

825 # correctly with multiple records. In incremental mode 

826 # we assume that the caller wants the visit definition 

827 # to be updated and has no worries about provenance 

828 # with the previous definition. 

829 for definition in visitRecords.visit_definition: 

830 self.butler.registry.syncDimensionData("visit_definition", definition) 

831 if inserted_or_updated is True: 

832 # Insert visit-detector regions if the visit is new. 

833 self.butler.registry.insertDimensionData( 

834 "visit_detector_region", 

835 *visitRecords.visit_detector_region, 

836 replace=False, 

837 ) 

838 self.log.verbose( 

839 "Inserted %s visit_detector_region records for new visit %s.", 

840 len(visitRecords.visit_detector_region), 

841 visitRecords.visit.id, 

842 ) 

843 n_new += 1 

844 # Cast below is because MyPy can't determine that 

845 # inserted_or_updated can only be False if update_records 

846 # is True. 

847 elif update_records or "region" in cast(dict, inserted_or_updated): 

848 # Replace visit-detector regions if we were told to 

849 # update records explicitly, or if the visit region 

850 # changed in an incremental=True update. 

851 self.butler.registry.insertDimensionData( 

852 "visit_detector_region", 

853 *visitRecords.visit_detector_region, 

854 replace=True, 

855 ) 

856 self.log.verbose( 

857 "Re-inserted %s visit_detector_region records for updated visit %s.", 

858 len(visitRecords.visit_detector_region), 

859 visitRecords.visit.id, 

860 ) 

861 n_fully_updated += 1 

862 else: 

863 self.log.verbose( 

864 "Updated visit %s without modifying visit_detector_region records.", 

865 visitRecords.visit.id, 

866 ) 

867 n_partially_updated += 1 

868 

869 # Update obscore exposure records with region information 

870 # from corresponding visits. 

871 if self.config.updateObsCoreTable: 

872 if obscore_manager := self.butler.registry.obsCoreTableManager: 

873 obscore_updates: list[tuple[int, int, Region]] = [] 

874 exposure_ids = [rec.exposure for rec in visitRecords.visit_definition] 

875 for record in visitRecords.visit_detector_region: 

876 obscore_updates += [ 

877 (exposure, record.detector, record.region) for exposure in exposure_ids 

878 ] 

879 if obscore_updates: 

880 obscore_manager.update_exposure_regions(instrument, obscore_updates) 

881 else: 

882 self.log.verbose("Skipped already-existing visit %s.", visitRecords.visit.id) 

883 n_skipped += 1 

884 self.log.info( 

885 "Finished writing database records for %d visit(s): %s left unchanged, %s new, " 

886 "%s updated with new detector regions, %s updated without new detector regions.", 

887 len(definitions), 

888 n_skipped, 

889 n_new, 

890 n_fully_updated, 

891 n_partially_updated, 

892 ) 

893 return Struct( 

894 n_visits=len(definitions), 

895 n_skipped=n_skipped, 

896 n_new=n_new, 

897 n_fully_updated=n_fully_updated, 

898 n_partially_updated=n_partially_updated, 

899 ) 

900 

901 

# Generic type variable used by the reduction helper functions below.
_T = TypeVar("_T")

903 

904 

905def _reduceOrNone(func: Callable[[_T, _T], _T | None], iterable: Iterable[_T | None]) -> _T | None: 

906 """Apply a binary function to pairs of elements in an iterable until a 

907 single value is returned, but return `None` if any element is `None` or 

908 there are no elements. 

909 """ 

910 r: _T | None = None 

911 for v in iterable: 

912 if v is None: 

913 return None 

914 if r is None: 

915 r = v 

916 else: 

917 r = func(r, v) 

918 return r 

919 

920 

921def _value_if_equal(a: _T, b: _T) -> _T | None: 

922 """Return either argument if they are equal, or `None` if they are not.""" 

923 return a if a == b else None 

924 

925 

926def _calc_mean_angle(angles: list[float]) -> float: 

927 """Calculate the mean angle, taking into account 0/360 wrapping. 

928 

929 Parameters 

930 ---------- 

931 angles : `list` [`float`] 

932 Angles to average together, in degrees. 

933 

934 Returns 

935 ------- 

936 average : `float` 

937 Average angle in degrees. 

938 """ 

939 # Save on all the math if we only have one value. 

940 if len(angles) == 1: 

941 return angles[0] 

942 

943 # Convert polar coordinates of unit circle to complex values. 

944 # Average the complex values. 

945 # Convert back to a phase angle. 

946 return math.degrees(cmath.phase(sum(cmath.rect(1.0, math.radians(d)) for d in angles) / len(angles))) 

947 

948 

class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    """Configuration for the one-to-one exposure grouping algorithm.

    Both fields are deprecated (see their ``deprecated`` messages); the
    visit system is now identified by the `VisitSystem` enum.
    """

    # Deprecated; retained only so previously persisted configs still parse.
    visitSystemId: Field[int] = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=0,
        deprecated="No longer used. Replaced by enum.",
    )
    # Deprecated; retained only so previously persisted configs still parse.
    visitSystemName: Field[str] = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="one-to-one",
        deprecated="No longer used. Replaced by enum.",
    )

962 

963 

@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposures identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def find_missing(
        self, exposures: list[DimensionRecord], registry: lsst.daf.butler.Registry
    ) -> list[DimensionRecord]:
        # With one visit per exposure nothing can ever be missing, so there
        # is no need to consult the registry at all.
        return []

    def group_exposures(self, exposures: list[DimensionRecord]) -> dict[Any, list[DimensionRecord]]:
        # Every exposure forms a singleton group keyed by its own ID.
        return {exp.id: [exp] for exp in exposures}

    def group(
        self, exposures: list[DimensionRecord], instrument: Instrument
    ) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        one_to_one = {VisitSystem.from_name("one-to-one")}
        for exp in exposures:
            yield VisitDefinitionData(
                instrument=exp.instrument,
                id=exp.id,
                name=exp.obs_id,
                exposures=[exp],
                visit_systems=one_to_one,
            )

    def getVisitSystems(self) -> set[VisitSystem]:
        # Docstring inherited from GroupExposuresTask.
        return set(VisitSystem.from_names(["one-to-one"]))

999 

1000 

class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    """Configuration for the by-group-metadata exposure grouping algorithm.

    Both fields are deprecated (see their ``deprecated`` messages); the
    visit system is now identified by the `VisitSystem` enum.
    """

    # Deprecated; retained only so previously persisted configs still parse.
    visitSystemId: Field[int] = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=1,
        deprecated="No longer used. Replaced by enum.",
    )
    # Deprecated; retained only so previously persisted configs still parse.
    visitSystemName: Field[str] = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="by-group-metadata",
        deprecated="No longer used. Replaced by enum.",
    )

1014 

1015 

@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses the exposure group.

    This algorithm uses the ``group`` dimension for modern universes and the
    ``exposure.group_id`` for older universes.

    This algorithm *assumes* group ID (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def find_missing(
        self, exposures: list[DimensionRecord], registry: lsst.daf.butler.Registry
    ) -> list[DimensionRecord]:
        grouped = self.group_exposures(exposures)
        # Work out which attribute carries the group in this universe.
        if "group" in registry.dimensions["exposure"].implied:
            group_key = "group"
        else:
            group_key = "group_name"
        missing: list[DimensionRecord] = []
        for members in grouped.values():
            # There is no way to know in advance how many exposures belong
            # to a group, so each group needs its own registry query.
            exemplar = members[0]
            known = set(
                registry.queryDimensionRecords(
                    "exposure",
                    where=f"exposure.{group_key} = groupnam",
                    bind={"groupnam": getattr(exemplar, group_key)},
                    instrument=exemplar.instrument,
                )
            )
            # Anything the registry knows about beyond what we were given
            # is a missing exposure.
            known.difference_update(members)
            missing.extend(known)
        return missing

    def group_exposures(self, exposures: list[DimensionRecord]) -> dict[Any, list[DimensionRecord]]:
        # Prefer the legacy group_name field when the records carry it;
        # otherwise use the modern group dimension.
        key = "group_name" if exposures and hasattr(exposures[0], "group_name") else "group"
        grouped: defaultdict[Any, list[DimensionRecord]] = defaultdict(list)
        for exposure in exposures:
            grouped[getattr(exposure, key)].append(exposure)
        return grouped

    def group(
        self, exposures: list[DimensionRecord], instrument: Instrument
    ) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        visit_systems = {VisitSystem.from_name("by-group-metadata")}
        use_group_dimension: bool | None = None
        for visit_name, members in self.group_exposures(exposures).items():
            instrument_name = members[0].instrument
            assert instrument_name == instrument.getName(), "Inconsistency in instrument name"
            # Decide once, from the first record seen, whether these records
            # carry the modern "group" attribute or the legacy group_id.
            if use_group_dimension is None:
                use_group_dimension = hasattr(members[0], "group")
            if use_group_dimension:
                candidate_ids = {instrument.group_name_to_group_id(e.group) for e in members}
            else:
                candidate_ids = {e.group_id for e in members}
            assert len(candidate_ids) == 1, "Grouping by exposure group does not yield consistent group IDs"
            yield VisitDefinitionData(
                instrument=instrument_name,
                id=candidate_ids.pop(),
                name=visit_name,
                exposures=members,
                visit_systems=visit_systems,
            )

    def getVisitSystems(self) -> set[VisitSystem]:
        # Docstring inherited from GroupExposuresTask.
        return set(VisitSystem.from_names(["by-group-metadata"]))

1096 

1097 

class _GroupExposuresByCounterAndExposuresConfig(GroupExposuresConfig):
    """Configuration for the counter-plus-one-to-one exposure grouping
    algorithm.

    Both fields are deprecated (see their ``deprecated`` messages); the
    visit system is now identified by the `VisitSystem` enum.
    """

    # Deprecated; retained only so previously persisted configs still parse.
    visitSystemId: Field[int] = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=2,
        deprecated="No longer used. Replaced by enum.",
    )
    # Deprecated; retained only so previously persisted configs still parse.
    visitSystemName: Field[str] = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="by-counter-and-exposures",
        deprecated="No longer used. Replaced by enum.",
    )

1111 

1112 

@registerConfigurable("one-to-one-and-by-counter", GroupExposuresTask.registry)
class _GroupExposuresByCounterAndExposuresTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses the sequence start and
    sequence end metadata to create multi-exposure visits, but also
    creates one-to-one visits.

    This algorithm uses the exposure.seq_start and
    exposure.seq_end fields to collect related snaps.
    It also groups single exposures.
    """

    ConfigClass = _GroupExposuresByCounterAndExposuresConfig

    def find_missing(
        self, exposures: list[DimensionRecord], registry: lsst.daf.butler.Registry
    ) -> list[DimensionRecord]:
        """Analyze the exposures and return relevant exposures known to
        registry.
        """
        groups = self.group_exposures(exposures)
        missing_exposures: list[DimensionRecord] = []
        for exposures_in_group in groups.values():
            sorted_exposures = sorted(exposures_in_group, key=lambda e: e.seq_num)
            first = sorted_exposures[0]

            # Only need to look for the seq_nums that we don't already have.
            seq_nums = set(range(first.seq_start, first.seq_end + 1))
            seq_nums.difference_update({exp.seq_num for exp in sorted_exposures})

            if seq_nums:
                # Missing something. Check registry.
                # Groups are keyed by (day_obs, seq_start, seq_end) in
                # group_exposures, and sequence counters reset from night
                # to night, so the query must also be constrained to this
                # group's day_obs; otherwise exposures from other nights
                # with the same counter range would be returned.
                records = list(
                    registry.queryDimensionRecords(
                        "exposure",
                        where="exposure.seq_start = seq_start AND exposure.seq_end = seq_end AND "
                        "exposure.day_obs = day_obs AND exposure.seq_num IN (seq_nums)",
                        bind={
                            "seq_start": first.seq_start,
                            "seq_end": first.seq_end,
                            "day_obs": first.day_obs,
                            "seq_nums": seq_nums,
                        },
                        instrument=first.instrument,
                    )
                )
                missing_exposures.extend(records)

        return missing_exposures

    def group_exposures(self, exposures: list[DimensionRecord]) -> dict[Any, list[DimensionRecord]]:
        # Group snaps by their shared counter range; day_obs is part of the
        # key because sequence counters are only unique within a night.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.day_obs, exposure.seq_start, exposure.seq_end].append(exposure)
        return groups

    def group(
        self, exposures: list[DimensionRecord], instrument: Instrument
    ) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        system_one_to_one = VisitSystem.from_name("one-to-one")
        system_seq_start_end = VisitSystem.from_name("by-seq-start-end")

        groups = self.group_exposures(exposures)
        for visit_key, exposures_in_group in groups.items():
            instrument_name = exposures_in_group[0].instrument

            # It is possible that the first exposure in a visit has not
            # been ingested. This can be determined and if that is the case
            # we can not reliably define the multi-exposure visit.
            skip_multi = False
            sorted_exposures = sorted(exposures_in_group, key=lambda e: e.seq_num)
            first = sorted_exposures.pop(0)
            if first.seq_num != first.seq_start:
                # Special case seq_num == 0 since that implies that the
                # instrument has no counters and therefore no multi-exposure
                # visits.
                if first.seq_num != 0:
                    self.log.warning(
                        "First exposure for visit %s is not present. Skipping the multi-snap definition.",
                        visit_key,
                    )
                    skip_multi = True

            multi_exposure = False
            if first.seq_start != first.seq_end:
                # This is a multi-exposure visit regardless of the number
                # of exposures present.
                multi_exposure = True

            # Define the one-to-one visits.
            for exposure in exposures_in_group:
                # Default is to use the exposure ID and name unless
                # this is the first exposure in a multi-exposure visit.
                visit_name = exposure.obs_id
                visit_id = exposure.id
                visit_systems = {system_one_to_one}

                if not multi_exposure:
                    # This is also a by-counter visit.
                    # It will use the same visit_name and visit_id.
                    visit_systems.add(system_seq_start_end)

                elif not skip_multi and exposure == first:
                    # This is the first legitimate exposure in a multi-exposure
                    # visit. It therefore needs a modified visit name and ID
                    # so it does not clash with the multi-exposure visit
                    # definition.
                    visit_name = f"{visit_name}_first"
                    visit_id = int(f"9{visit_id}")

                yield VisitDefinitionData(
                    instrument=instrument_name,
                    id=visit_id,
                    name=visit_name,
                    exposures=[exposure],
                    visit_systems=visit_systems,
                )

            # Multi-exposure visit.
            if not skip_multi and multi_exposure:
                # Define the visit using the first exposure
                visit_name = first.obs_id
                visit_id = first.id

                yield VisitDefinitionData(
                    instrument=instrument_name,
                    id=visit_id,
                    name=visit_name,
                    exposures=exposures_in_group,
                    visit_systems={system_seq_start_end},
                )

    def getVisitSystems(self) -> set[VisitSystem]:
        # Docstring inherited from GroupExposuresTask.
        # Using a Config for this is difficult because what this grouping
        # algorithm is doing is using two visit systems.
        # One is using metadata (but not by-group) and the other is the
        # one-to-one. For now hard-code in class.
        return set(VisitSystem.from_names(["one-to-one", "by-seq-start-end"]))

1247 

1248 

class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    """Configuration for `_ComputeVisitRegionsFromSingleRawWcsTask`."""

    # When True, per-detector regions from every exposure in the visit are
    # combined via convex hull instead of trusting the first exposure alone.
    mergeExposures: Field[bool] = Field(
        doc=(
            "If True, merge per-detector regions over all exposures in a "
            "visit (via convex hull) instead of using the first exposure and "
            "assuming its regions are valid for all others."
        ),
        dtype=bool,
        default=False,
    )
    # Detector whose WCS anchors the focal-plane-to-sky mapping; None means
    # pick whichever detector the camera iterates first.
    detectorId: Field[int | None] = Field(
        doc=(
            "Load the WCS for the detector with this ID. If None, use an "
            "arbitrary detector (the first found in a query of the data "
            "repository for each exposure (or all exposures, if "
            "mergeExposures is True)."
        ),
        dtype=int,
        optional=True,
        default=None,
    )
    # NOTE(review): the doc string reads "version camera geometry";
    # presumably this means "versioned camera geometry" — confirm before
    # fixing, since the string is user-visible config help text.
    requireVersionedCamera: Field[bool] = Field(
        doc=(
            "If True, raise LookupError if version camera geometry cannot be "
            "loaded for an exposure. If False, use the nominal camera from "
            "the Instrument class instead."
        ),
        dtype=bool,
        optional=False,
        default=False,
    )

1280 

1281 

@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS (recomputed from
    the ``exposure`` dimension record) and a camera to project the bounding
    boxes of all detectors onto the sky, relating different detectors by their
    positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to. If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig
    config: _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(
        self, exposure: DimensionRecord, *, collections: Any = None
    ) -> dict[int, list[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections.defaults``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = list(self.butler.collections.defaults)
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Build the pointing entirely from exposure-record metadata (no pixel
        # data is read): sky rotation angle plus the tracking RA/Dec.
        orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
        radec = lsst.geom.SpherePoint(
            lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
            lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees),
        )

        # Choose the reference detector whose WCS anchors the mapping.
        if self.config.detectorId is None:
            detectorId = next(camera.getIdIter())
        else:
            detectorId = self.config.detectorId
        wcsDetector = camera[detectorId]

        # Ask the raw formatter to create the relevant WCS
        # This allows flips to be taken into account
        instrument = self.getInstrument(exposure.instrument)
        rawFormatter = instrument.getRawFormatter({"detector": detectorId})

        try:
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)  # type: ignore
        except AttributeError:
            # Formatters without this method cannot support visit definition;
            # surface that as a TypeError rather than a confusing AttributeError.
            raise TypeError(
                f"Raw formatter is {get_full_type_name(rawFormatter)} but visit"
                " definition requires it to support 'makeRawSkyWcsFromBoresight'"
            ) from None

        # Compose focal plane -> reference-detector pixels -> sky, so that
        # every other detector can be projected via focal plane coordinates.
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            # Pad each detector bbox by config.padding before projecting
            # its corners to the sky.
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds

    def compute(
        self, visit: VisitDefinitionData, *, collections: Any = None
    ) -> tuple[Region, dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            # Accumulate corner vectors per detector over every exposure;
            # the convex hull of the union is taken below.
            detectorBounds: dict[int, list[UnitVector3d]] = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            # Assume the first exposure's per-detector regions hold for all
            # snaps in the visit.
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        # The visit region is the hull of all detectors' corner vectors.
        return ConvexPolygon.convexHull(visitBounds), detectorRegions