Coverage for python/lsst/obs/base/defineVisits.py: 35%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

import dataclasses
import operator
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from typing import Any, Callable, ClassVar, Dict, Iterable, List, Optional, Set, Tuple, TypeVar

import lsst.geom
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.daf.butler import (
    Butler,
    DataCoordinate,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Progress,
    Timespan,
)
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d

from ._instrument import Instrument, loadCamera


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """


@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit."""

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value. They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """

    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them. This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()
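
# A minimal sketch (not part of the original module) of how a custom grouping
# algorithm could be registered. The class and config names below are
# hypothetical; only `GroupExposuresTask.registry`, `registerConfigurable`,
# `Field`, `VisitDefinitionData`, and the abstract interface above come from
# this module.
#
#     class _MyGroupingConfig(GroupExposuresConfig):
#         visitSystemId = Field(doc="Visit system ID.", dtype=int, default=2)
#         visitSystemName = Field(doc="Visit system name.", dtype=str, default="my-grouping")
#
#     @registerConfigurable("my-grouping", GroupExposuresTask.registry)
#     class _MyGroupingTask(GroupExposuresTask):
#         ConfigClass = _MyGroupingConfig
#
#         def group(self, exposures):
#             # For illustration only: put all given exposures into one visit,
#             # keyed on the first exposure's identifiers.
#             yield VisitDefinitionData(
#                 instrument=exposures[0].instrument,
#                 id=exposures[0].id,
#                 name=exposures[0].obs_id,
#                 exposures=list(exposures),
#             )
#
#         def getVisitSystem(self):
#             return (self.config.visitSystemId, self.config.visitSystemName)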


class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=250,
        doc=(
            "Pad raw image bounding boxes with specified number of pixels "
            "when calculating their (conservatively large) region on the "
            "sky. Note that the config value for pixelMargin of the "
            "reference object loaders in meas_algorithms should be <= "
            "the value set here."
        ),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """

    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap: Dict[str, Instrument] = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=(
            "Registry of algorithms for computing on-sky regions for visits "
            "and visit+detector combinations."
        ),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName: str) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(
        self, visit: VisitDefinitionData, *, collections: Any = None
    ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()
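
# A minimal sketch (not part of the original module) of a registered region
# algorithm that ignores camera geometry and just draws a circle around the
# boresight. The class name and the 2-degree radius are hypothetical, and
# `lsst.sphgeom.Circle`, `LonLat`, and `Angle` are assumed to be available;
# only the registry, `registerConfigurable`, and the `compute` contract above
# come from this module.
#
#     from lsst.sphgeom import Angle, Circle, LonLat, UnitVector3d
#
#     @registerConfigurable("boresight-circle", ComputeVisitRegionsTask.registry)
#     class _BoresightCircleTask(ComputeVisitRegionsTask):
#         ConfigClass = ComputeVisitRegionsConfig
#
#         def compute(self, visit, *, collections=None):
#             exposure = visit.exposures[0]
#             center = UnitVector3d(LonLat.fromDegrees(exposure.tracking_ra, exposure.tracking_dec))
#             # No per-detector regions in this simplified algorithm.
#             return Circle(center, Angle.fromDegrees(2.0)), {}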


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=(
            "If True, silently ignore input exposures that do not have "
            "observation_type=SCIENCE. If False, raise an exception if one "
            "is encountered."
        ),
        dtype=bool,
        optional=False,
        default=True,
    )
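
# A configuration sketch (not part of the original module) showing how the
# registry fields above are typically overridden, assuming the usual
# `lsst.pex.config` RegistryField conventions (`.name` selects the active
# algorithm; indexing by a registered name reaches that algorithm's config).
# The padding value of 300 is arbitrary.
#
#     config = DefineVisitsConfig()
#     config.groupExposures.name = "one-to-one"
#     config.computeVisitRegions.name = "single-raw-wcs"
#     config.computeVisitRegions["single-raw-wcs"].padding = 300
#     config.ignoreNonScienceExposures = False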


class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance. Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively. The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors. Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used. For the default configuration,
    a single raw for each exposure is sufficient.

    Defining the same visit the same way multiple times (e.g. via multiple
    invocations of this task on the same exposures, with the same
    configuration) is safe, but it may be inefficient, as most of the work must
    be done before new visits can be compared to existing visits.
    """

    def __init__(self, config: DefineVisitsConfig, *, butler: Butler, **kwargs: Any):
        config.validate()  # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.progress = Progress("obs.base.DefineVisitsTask")
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

    def _reduce_kwargs(self) -> dict:
        # Add extra parameters to pickle
        return dict(**super()._reduce_kwargs(), butler=self.butler)

    ConfigClass: ClassVar[Config] = DefineVisitsConfig

    _DefaultName: ClassVar[str] = "defineVisits"

    groupExposures: GroupExposuresTask
    computeVisitRegions: ComputeVisitRegionsTask

    def _buildVisitRecords(
        self, definition: VisitDefinitionData, *, collections: Any = None
    ) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(
            definition, collections=collections
        )
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(operator.add, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(_value_if_equal, (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(_value_if_equal, (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(_value_if_equal, (e.science_program for e in definition.exposures))

        # The observing day for a visit is defined by the earliest observation
        # of the visit.
        observing_day = _reduceOrNone(min, (e.day_obs for e in definition.exposures))
        observation_reason = _reduceOrNone(
            _value_if_equal, (e.observation_reason for e in definition.exposures)
        )
        if observation_reason is None:
            # Be explicit about there being multiple reasons.
            # MyPy can't really handle DimensionRecord fields as
            # DimensionRecord classes are dynamically defined; easiest to just
            # shush it when it complains.
            observation_reason = "various"  # type: ignore

        # Use the mean zenith angle as an approximation.
        zenith_angle = _reduceOrNone(operator.add, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                day_obs=observing_day,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ],
        )

    def run(
        self,
        dataIds: Iterable[DataId],
        *,
        collections: Optional[str] = None,
        update_records: bool = False,
    ) -> None:
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs. These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.
        update_records : `bool`, optional
            If `True` (`False` is default), update existing visit records that
            conflict with the new ones instead of rejecting them (and when this
            occurs, update visit_detector_region as well). THIS IS AN ADVANCED
            OPTION THAT SHOULD ONLY BE USED TO FIX REGIONS AND/OR METADATA THAT
            ARE KNOWN TO BE BAD, AND IT CANNOT BE USED TO REMOVE EXPOSURES OR
            DETECTORS FROM A VISIT.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if a visit ID conflict is detected and the existing visit
            differs from the new one.
        """
        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        data_id_set: Set[DataCoordinate] = {
            self.butler.registry.expandDataId(d, graph=dimensions) for d in dataIds
        }
        if not data_id_set:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in data_id_set:
            record = dataId.records["exposure"]
            assert record is not None, "Guaranteed by expandDataIds call earlier."
            if record.tracking_ra is None or record.tracking_dec is None or record.sky_angle is None:
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(
                        f"Input exposure {dataId} has observation_type "
                        f"{record.observation_type}, but is not on sky."
                    )
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No on-sky exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                f"All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        (instrument,) = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system", {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Iterate over visits, compute regions, and insert dimension data, one
        # transaction per visit. If a visit already exists, we skip all other
        # inserts.
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        for visitDefinition in self.progress.wrap(
            definitions, total=len(definitions), desc="Computing regions and inserting visits"
        ):
            visitRecords = self._buildVisitRecords(visitDefinition, collections=collections)
            with self.butler.registry.transaction():
                inserted_or_updated = self.butler.registry.syncDimensionData(
                    "visit",
                    visitRecords.visit,
                    update=update_records,
                )
                if inserted_or_updated:
                    if inserted_or_updated is True:
                        # This is a new visit, not an update to an existing
                        # one, so insert visit definition.
                        # We don't allow visit definitions to change even when
                        # asked to update, because we'd have to delete the old
                        # visit_definitions first and also worry about what
                        # this does to datasets that already use the visit.
                        self.butler.registry.insertDimensionData(
                            "visit_definition", *visitRecords.visit_definition
                        )
                    # [Re]Insert visit_detector_region records for both inserts
                    # and updates, because we do allow updating to affect the
                    # region calculations.
                    self.butler.registry.insertDimensionData(
                        "visit_detector_region", *visitRecords.visit_detector_region, replace=update_records
                    )
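
# A usage sketch (not part of the original module). The repository path,
# instrument name, and collection are illustrative only; the calls themselves
# (`Butler`, `DefineVisitsConfig`, `DefineVisitsTask`, `run`) are defined or
# imported above.
#
#     butler = Butler("/path/to/repo", writeable=True)
#     config = DefineVisitsConfig()
#     task = DefineVisitsTask(config=config, butler=butler)
#     data_ids = butler.registry.queryDataIds(["exposure"], instrument="MyCam")
#     task.run(data_ids, collections=["MyCam/raw/all"])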


_T = TypeVar("_T")


def _reduceOrNone(func: Callable[[_T, _T], Optional[_T]], iterable: Iterable[Optional[_T]]) -> Optional[_T]:
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
    """
    r: Optional[_T] = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r


def _value_if_equal(a: _T, b: _T) -> Optional[_T]:
    """Return either argument if they are equal, or `None` if they are not."""
    return a if a == b else None
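
# Illustrative behavior of the helpers above (not part of the original module);
# `operator` is already imported at the top of this module.
#
#     _reduceOrNone(operator.add, [1.0, 2.0, 3.0])   # -> 6.0
#     _reduceOrNone(operator.add, [1.0, None, 3.0])  # -> None
#     _reduceOrNone(min, [])                         # -> None
#     _value_if_equal("r", "r")                      # -> "r"
#     _value_if_equal("r", "g")                      # -> None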


class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.obs_id,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(
                e.group_id == visitId for e in exposuresInGroup
            ), "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(
                instrument=instrument, id=visitId, name=visitName, exposures=exposuresInGroup
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)
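
# Illustrative grouping behavior (not part of the original module). The
# `SimpleNamespace` objects below are hypothetical stand-ins for exposure
# dimension records; two exposures sharing a group_name collapse into a single
# visit keyed on their common group_id.
#
#     from types import SimpleNamespace
#
#     e1 = SimpleNamespace(instrument="MyCam", id=101, obs_id="A", group_name="G1", group_id=5000)
#     e2 = SimpleNamespace(instrument="MyCam", id=102, obs_id="B", group_name="G1", group_id=5000)
#     task = _GroupExposuresByGroupMetadataTask(config=_GroupExposuresByGroupMetadataConfig())
#     [visit] = task.group([e1, e2])
#     # visit.id == 5000, visit.name == "G1", visit.exposures == [e1, e2]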


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=(
            "If True, merge per-detector regions over all exposures in a "
            "visit (via convex hull) instead of using the first exposure and "
            "assuming its regions are valid for all others."
        ),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=(
            "Load the WCS for the detector with this ID. If None, use an "
            "arbitrary detector (the first found in a query of the data "
            "repository for each exposure, or all exposures if "
            "mergeExposures is True)."
        ),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=(
            "If True, raise LookupError if versioned camera geometry cannot "
            "be loaded for an exposure. If False, use the nominal camera "
            "from the Instrument class instead."
        ),
        dtype=bool,
        optional=False,
        default=False,
    )


@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to. If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
    """
    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(
        self, exposure: DimensionRecord, *, collections: Any = None
    ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(
                lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees),
            )
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS
            # This allows flips to be taken into account
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)

        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets(
                    "raw.wcs", dataId=exposure.dataId, collections=collections
                )
                if not wcsRefsIter:
                    raise LookupError(
                        f"No raw.wcs datasets found for data ID {exposure.dataId} "
                        f"in collections {collections}."
                    )
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get(
                    "raw.wcs",
                    dataId=exposure.dataId,
                    detector=self.config.detectorId,
                    collections=collections,
                )
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds
    def compute(
        self, visit: VisitDefinitionData, *, collections: Any = None
    ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds: Dict[int, List[UnitVector3d]] = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions
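
# A self-contained sketch (not part of the original module) of the geometric
# step `compute` performs: corner unit vectors are combined into a convex-hull
# region per detector, and the union of all corners gives the visit region.
# The corner values are arbitrary illustrative unit vectors; `ConvexPolygon`
# and `UnitVector3d` are already imported at the top of this module.
#
#     cornersA = [UnitVector3d(1.0, 0.0, 0.1), UnitVector3d(1.0, 0.1, 0.0), UnitVector3d(1.0, 0.1, 0.1)]
#     cornersB = [UnitVector3d(1.0, -0.1, 0.0), UnitVector3d(1.0, 0.0, -0.1), UnitVector3d(1.0, -0.1, -0.1)]
#     regionA = ConvexPolygon.convexHull(cornersA)
#     regionB = ConvexPolygon.convexHull(cornersB)
#     visitRegion = ConvexPolygon.convexHull(cornersA + cornersB)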