Coverage for python/lsst/obs/base/defineVisits.py: 34%
# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

import dataclasses
import itertools
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from multiprocessing import Pool
from typing import Any, Dict, Iterable, List, Optional, Tuple

import lsst.geom
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.daf.butler import (
    Butler,
    DataCoordinate,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Progress,
    Timespan,
)
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d

from ._instrument import Instrument, loadCamera


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """


@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit."""

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value. They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
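
    Examples
    --------
    Concrete grouping algorithms are selected via
    `DefineVisitsConfig.groupExposures`. A minimal sketch of a new plugin
    registered with this registry; the ``"my-grouping"`` name and
    ``MyGroupingTask`` class are hypothetical, not part of obs_base::

        from lsst.pex.config import registerConfigurable

        @registerConfigurable("my-grouping", GroupExposuresTask.registry)
        class MyGroupingTask(GroupExposuresTask):
            ConfigClass = GroupExposuresConfig

            def group(self, exposures):
                # Hypothetical: one visit per exposure, reusing its ID/name.
                for exposure in exposures:
                    yield VisitDefinitionData(
                        instrument=exposure.instrument,
                        id=exposure.id,
                        name=exposure.obs_id,
                        exposures=[exposure],
                    )

            def getVisitSystem(self):
                return (0, "my-grouping")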
    """

    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them. This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()


class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=250,
        doc=(
            "Pad raw image bounding boxes with specified number of pixels "
            "when calculating their (conservatively large) region on the "
            "sky. Note that the config value for pixelMargin of the "
            "reference object loaders in meas_algorithms should be <= "
            "the value set here."
        ),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """

    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=(
            "Registry of algorithms for computing on-sky regions for visits "
            "and visit+detector combinations."
        ),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(
        self, visit: VisitDefinitionData, *, collections: Any = None
    ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=(
            "If True, silently ignore input exposures that do not have "
            "observation_type='science'. If False, raise an exception if "
            "one is encountered."
        ),
        dtype=bool,
        optional=False,
        default=True,
    )


class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance. Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively. The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors. Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used. For the default configuration,
    a single raw for each exposure is sufficient.

    Defining the same visit the same way multiple times (e.g. via multiple
    invocations of this task on the same exposures, with the same
    configuration) is safe, but it may be inefficient, as most of the work must
    be done before new visits can be compared to existing visits.
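
    Examples
    --------
    A minimal usage sketch; the repository path, the ``HSC`` instrument, and
    the ``day_obs`` value below are placeholders, not defaults of this task::

        from lsst.daf.butler import Butler
        from lsst.obs.base import DefineVisitsTask

        butler = Butler("REPO_PATH", writeable=True)
        config = DefineVisitsTask.ConfigClass()
        # Select the exposure-grouping plugin (registered names include
        # "one-to-one" and "by-group-metadata").
        config.groupExposures.name = "one-to-one"
        task = DefineVisitsTask(config=config, butler=butler)

        data_ids = butler.registry.queryDataIds(
            ["exposure"], instrument="HSC", where="exposure.day_obs = 20200101"
        )
        task.run(data_ids)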
    """

    def __init__(self, config: Optional[DefineVisitsConfig] = None, *, butler: Butler, **kwargs: Any):
        config.validate()  # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.progress = Progress("obs.base.DefineVisitsTask")
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

    def _reduce_kwargs(self):
        # Add extra parameters to pickle
        return dict(**super()._reduce_kwargs(), butler=self.butler)

    ConfigClass = DefineVisitsConfig

    _DefaultName = "defineVisits"

    def _buildVisitRecords(
        self, definition: VisitDefinitionData, *, collections: Any = None
    ) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(
            definition, collections=collections
        )
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(sum, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(
            lambda a, b: a if a == b else None, (e.physical_filter for e in definition.exposures)
        )
        target_name = _reduceOrNone(
            lambda a, b: a if a == b else None, (e.target_name for e in definition.exposures)
        )
        science_program = _reduceOrNone(
            lambda a, b: a if a == b else None, (e.science_program for e in definition.exposures)
        )

        # observing day for a visit is defined by the earliest observation
        # of the visit
        observing_day = _reduceOrNone(min, (e.day_obs for e in definition.exposures))
        observation_reason = _reduceOrNone(
            lambda a, b: a if a == b else None, (e.observation_reason for e in definition.exposures)
        )
        if observation_reason is None:
            # Be explicit about there being multiple reasons
            observation_reason = "various"

        # Use the mean zenith angle as an approximation
        zenith_angle = _reduceOrNone(sum, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                day_obs=observing_day,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ],
        )

    def _expandExposureId(self, dataId: DataId) -> DataCoordinate:
        """Return the expanded version of an exposure ID.

        A private method to allow ID expansion in a pool without resorting
        to local callables.

        Parameters
        ----------
        dataId : `dict` or `DataCoordinate`
            Exposure-level data ID.

        Returns
        -------
        expanded : `DataCoordinate`
            A data ID that includes full metadata for all exposure dimensions.
        """
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        return self.butler.registry.expandDataId(dataId, graph=dimensions)

    def _buildVisitRecordsSingle(self, args) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit and collection.

        A wrapper for `_buildVisitRecords` to allow it to be run as part of
        a pool without resorting to local callables.

        Parameters
        ----------
        args : `tuple` [ `VisitDefinitionData`, `Any` ]
            A tuple consisting of the ``definition`` and ``collections``
            arguments to `_buildVisitRecords`, in that order.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        return self._buildVisitRecords(args[0], collections=args[1])

    def run(
        self,
        dataIds: Iterable[DataId],
        *,
        pool: Optional[Pool] = None,
        processes: int = 1,
        collections: Optional[str] = None,
        update_records: bool = False,
    ):
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs. These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        pool : `multiprocessing.Pool`, optional
            If not `None`, a process pool with which to parallelize some
            operations.
        processes : `int`, optional
            The number of processes to use. Ignored if ``pool`` is not `None`.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.
        update_records : `bool`, optional
            If `True` (`False` is default), update existing visit records that
            conflict with the new ones instead of rejecting them (and when this
            occurs, update visit_detector_region as well). THIS IS AN ADVANCED
            OPTION THAT SHOULD ONLY BE USED TO FIX REGIONS AND/OR METADATA THAT
            ARE KNOWN TO BE BAD, AND IT CANNOT BE USED TO REMOVE EXPOSURES OR
            DETECTORS FROM A VISIT.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if a visit ID conflict is detected and the existing visit
            differs from the new one.
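
        Examples
        --------
        A sketch of a call with explicit exposure data IDs, run over several
        processes; the instrument name, exposure IDs, and process count are
        placeholders::

            task.run(
                [
                    {"instrument": "HSC", "exposure": 903334},
                    {"instrument": "HSC", "exposure": 903336},
                ],
                processes=4,
            )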
        """
        # Set up multiprocessing, if desired.
        if pool is None and processes > 1:
            pool = Pool(processes)
        mapFunc = map if pool is None else pool.imap_unordered
        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dataIds = set(mapFunc(self._expandExposureId, dataIds))
        if not dataIds:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in dataIds:
            record = dataId.records["exposure"]
            if record.observation_type != "science":
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(
                        f"Input exposure {dataId} has observation_type "
                        f"{record.observation_type}, not 'science'."
                    )
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No science exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                f"All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        (instrument,) = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system", {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Compute regions and build DimensionRecords for each visit.
        # This is the only parallel step, but it _should_ be the most expensive
        # one (unless DB operations are slow).
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        allRecords = mapFunc(self._buildVisitRecordsSingle, zip(definitions, itertools.repeat(collections)))
        # Iterate over visits and insert dimension data, one transaction per
        # visit. If a visit already exists, we skip all other inserts.
        for visitRecords in self.progress.wrap(
            allRecords, total=len(definitions), desc="Computing regions and inserting visits"
        ):
            with self.butler.registry.transaction():
                inserted_or_updated = self.butler.registry.syncDimensionData(
                    "visit",
                    visitRecords.visit,
                    update=update_records,
                )
                if inserted_or_updated:
                    if inserted_or_updated is True:
                        # This is a new visit, not an update to an existing
                        # one, so insert visit definition.
                        # We don't allow visit definitions to change even when
                        # asked to update, because we'd have to delete the old
                        # visit_definitions first and also worry about what
                        # this does to datasets that already use the visit.
                        self.butler.registry.insertDimensionData(
                            "visit_definition", *visitRecords.visit_definition
                        )
                    # [Re]Insert visit_detector_region records for both inserts
                    # and updates, because we do allow updating to affect the
                    # region calculations.
                    self.butler.registry.insertDimensionData(
                        "visit_detector_region", *visitRecords.visit_detector_region, replace=update_records
                    )


def _reduceOrNone(func, iterable):
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
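
    Examples
    --------
    Illustrative doctest-style calls:

    >>> _reduceOrNone(min, [3, 1, 2])
    1
    >>> _reduceOrNone(min, [3, None, 2]) is None
    True
    >>> _reduceOrNone(min, []) is None
    True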
    """
    r = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r


class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.obs_id,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc="Integer ID of the visit_system implemented by this grouping algorithm.",
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc="String name of the visit_system implemented by this grouping algorithm.",
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(
                e.group_id == visitId for e in exposuresInGroup
            ), "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(
                instrument=instrument, id=visitId, name=visitName, exposures=exposuresInGroup
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=(
            "If True, merge per-detector regions over all exposures in a "
            "visit (via convex hull) instead of using the first exposure and "
            "assuming its regions are valid for all others."
        ),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=(
            "Load the WCS for the detector with this ID. If None, use an "
            "arbitrary detector (the first found in a query of the data "
            "repository for each exposure, or for all exposures if "
            "mergeExposures is True)."
        ),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=(
            "If True, raise LookupError if versioned camera geometry cannot be "
            "loaded for an exposure. If False, use the nominal camera from "
            "the Instrument class instead."
        ),
        dtype=bool,
        optional=False,
        default=False,
    )


@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to. If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
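
    Examples
    --------
    The per-detector and per-visit regions produced by this task are convex
    hulls of detector-corner positions on the sky. A toy illustration of the
    final step only, with made-up corner vectors (real corners come from
    `computeExposureBounds`)::

        from lsst.sphgeom import ConvexPolygon, UnitVector3d

        corners = [
            UnitVector3d(1.0, 0.0, 0.0),
            UnitVector3d(0.9, 0.1, 0.0),
            UnitVector3d(0.9, 0.0, 0.1),
            UnitVector3d(0.9, 0.1, 0.1),
        ]
        detector_region = ConvexPolygon.convexHull(corners)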
    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(
        self, exposure: DimensionRecord, *, collections: Any = None
    ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(
                lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees),
            )
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS
            # This allows flips to be taken into account
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)

        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets(
                    "raw.wcs", dataId=exposure.dataId, collections=collections
                )
                if not wcsRefsIter:
                    raise LookupError(
                        f"No raw.wcs datasets found for data ID {exposure.dataId} "
                        f"in collections {collections}."
                    )
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get(
                    "raw.wcs",
                    dataId=exposure.dataId,
                    detector=self.config.detectorId,
                    collections=collections,
                )
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds

    def compute(
        self, visit: VisitDefinitionData, *, collections: Any = None
    ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions