Coverage for python/lsst/obs/base/defineVisits.py : 30%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

from abc import ABCMeta, abstractmethod
from collections import defaultdict
import itertools
import dataclasses
from typing import Any, Dict, Iterable, List, Optional, Tuple
from multiprocessing import Pool

from lsst.daf.butler import (
    Butler,
    DataCoordinate,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Timespan,
)

import lsst.geom
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d

from ._instrument import loadCamera, Instrument


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """


@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit.
    """

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value. They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them. This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()
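
# A minimal sketch of how a concrete grouping algorithm would plug into the
# registry above, assuming it lives in a downstream package. The names here
# are hypothetical; the implementations that ship with this module appear
# further below.
#
#     class MyGroupingConfig(GroupExposuresConfig):
#         pass
#
#     @registerConfigurable("my-grouping", GroupExposuresTask.registry)
#     class MyGroupingTask(GroupExposuresTask):
#         ConfigClass = MyGroupingConfig
#
#         def group(self, exposures):
#             # Illustration only: one visit per exposure.
#             for e in exposures:
#                 yield VisitDefinitionData(instrument=e.instrument, id=e.id,
#                                           name=e.obs_id, exposures=[e])
#
#         def getVisitSystem(self):
#             return (42, "my-grouping")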


class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=0,
        doc=("Pad raw image bounding boxes with specified number of pixels "
             "when calculating their (conservatively large) region on the "
             "sky."),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=("Registry of algorithms for computing on-sky regions for visits "
             "and visit+detector combinations."),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName: str) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that
        visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry,
            overriding ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=("If True, silently ignore input exposures that do not have "
             "observation_type='science'. If False, raise an exception if "
             "one is encountered."),
        dtype=bool,
        optional=False,
        default=True,
    )
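
# A minimal configuration sketch for the task defined below. The field names
# come from `DefineVisitsConfig` above; the values shown are illustrative
# only:
#
#     config = DefineVisitsConfig()
#     config.groupExposures.name = "one-to-one"           # or "by-group-metadata"
#     config.computeVisitRegions.name = "single-raw-wcs"
#     config.ignoreNonScienceExposures = True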


class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance. Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively. The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors. Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used. For the default configuration,
    a single raw for each exposure is sufficient.

    Defining the same visit the same way multiple times (e.g. via multiple
    invocations of this task on the same exposures, with the same
    configuration) is safe, but it may be inefficient, as most of the work must
    be done before new visits can be compared to existing visits.
    """
    def __init__(self, config: Optional[DefineVisitsConfig] = None, *, butler: Butler, **kwargs: Any):
        config.validate()  # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

    def _reduce_kwargs(self):
        # Add extra parameters to pickle.
        return dict(**super()._reduce_kwargs(), butler=self.butler)

    ConfigClass = DefineVisitsConfig

    _DefaultName = "defineVisits"

    def _buildVisitRecords(self, definition: VisitDefinitionData, *,
                           collections: Any = None) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry,
            overriding ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(definition,
                                                                             collections=collections)
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(sum, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(lambda a, b: a if a == b else None,
                                    (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.science_program for e in definition.exposures))

        # The observing day for a visit is defined by the earliest observation
        # of the visit.
        observing_day = _reduceOrNone(min, (e.day_obs for e in definition.exposures))
        observation_reason = _reduceOrNone(lambda a, b: a if a == b else None,
                                           (e.observation_reason for e in definition.exposures))
        if observation_reason is None:
            # Be explicit about there being multiple reasons.
            observation_reason = "various"

        # Use the mean zenith angle as an approximation.
        zenith_angle = _reduceOrNone(sum, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                day_obs=observing_day,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ],
        )

    def _expandExposureId(self, dataId: DataId) -> DataCoordinate:
        """Return the expanded version of an exposure ID.

        A private method to allow ID expansion in a pool without resorting
        to local callables.

        Parameters
        ----------
        dataId : `dict` or `DataCoordinate`
            Exposure-level data ID.

        Returns
        -------
        expanded : `DataCoordinate`
            A data ID that includes full metadata for all exposure dimensions.
        """
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        return self.butler.registry.expandDataId(dataId, graph=dimensions)

    def _buildVisitRecordsSingle(self, args) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit and collection.

        A wrapper for `_buildVisitRecords` to allow it to be run as part of
        a pool without resorting to local callables.

        Parameters
        ----------
        args : `tuple` [ `VisitDefinitionData`, Any ]
            A tuple consisting of the ``definition`` and ``collections``
            arguments to `_buildVisitRecords`, in that order.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        return self._buildVisitRecords(args[0], collections=args[1])

    def run(self, dataIds: Iterable[DataId], *,
            pool: Optional[Pool] = None,
            processes: int = 1,
            collections: Optional[str] = None):
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs. These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        pool : `multiprocessing.Pool`, optional
            If not `None`, a process pool with which to parallelize some
            operations.
        processes : `int`, optional
            The number of processes to use. Ignored if ``pool`` is not `None`.
        collections : Any, optional
            Collections to be searched for raws and camera geometry,
            overriding ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if a visit ID conflict is detected and the existing visit
            differs from the new one.
        """
        # Set up multiprocessing, if desired.
        if pool is None and processes > 1:
            pool = Pool(processes)
        mapFunc = map if pool is None else pool.imap_unordered
        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dataIds = set(mapFunc(self._expandExposureId, dataIds))
        if not dataIds:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in dataIds:
            record = dataId.records["exposure"]
            if record.observation_type != "science":
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(f"Input exposure {dataId} has observation_type "
                                       f"{record.observation_type}, not 'science'.")
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No science exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                "All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        instrument, = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system",
            {"instrument": instrument, "id": visitSystemId, "name": visitSystemName},
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Compute regions and build DimensionRecords for each visit.
        # This is the only parallel step, but it _should_ be the most
        # expensive one (unless DB operations are slow).
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        allRecords = mapFunc(self._buildVisitRecordsSingle,
                             zip(definitions, itertools.repeat(collections)))
        # Iterate over visits and insert dimension data, one transaction per
        # visit. If a visit already exists, we skip all other inserts.
        for visitRecords in allRecords:
            with self.butler.registry.transaction():
                if self.butler.registry.syncDimensionData("visit", visitRecords.visit):
                    self.butler.registry.insertDimensionData("visit_definition",
                                                             *visitRecords.visit_definition)
                    self.butler.registry.insertDimensionData("visit_detector_region",
                                                             *visitRecords.visit_detector_region)
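
# A usage sketch for the task above. The repository path, instrument name, and
# collection handling are hypothetical and assume raws have already been
# ingested into the repository:
#
#     from lsst.daf.butler import Butler
#
#     butler = Butler("/path/to/repo", writeable=True)
#     task = DefineVisitsTask(config=DefineVisitsConfig(), butler=butler)
#     # Query all exposure-level data IDs for one (hypothetical) instrument.
#     dataIds = butler.registry.queryDataIds(["exposure"],
#                                            dataId={"instrument": "HSC"})
#     # Optionally pass ``collections=`` to override butler.collections.
#     task.run(dataIds)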


def _reduceOrNone(func, iterable):
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
    """
    r = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r
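
# For example (illustrative values only):
#
#     _reduceOrNone(min, [3, 1, 2])     -> 1
#     _reduceOrNone(min, [3, None, 2])  -> None
#     _reduceOrNone(min, [])            -> None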


class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.obs_id,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values; if it is not,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(e.group_id == visitId for e in exposuresInGroup), \
                "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(instrument=instrument, id=visitId, name=visitName,
                                      exposures=exposuresInGroup)

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=("If True, merge per-detector regions over all exposures in a "
             "visit (via convex hull) instead of using the first exposure and "
             "assuming its regions are valid for all others."),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=("Load the WCS for the detector with this ID. If None, use an "
             "arbitrary detector: the first found in a query of the data "
             "repository for each exposure (or all exposures, if "
             "mergeExposures is True)."),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=("If True, raise LookupError if versioned camera geometry cannot "
             "be loaded for an exposure. If False, use the nominal camera "
             "from the Instrument class instead."),
        dtype=bool,
        optional=False,
        default=False,
    )


@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to. If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(self, exposure: DimensionRecord, *, collections: Any = None
                              ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry,
            overriding ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the
            sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry.
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                                          lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees))
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS.
            # This allows flips to be taken into account.
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)
        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets("raw.wcs", dataId=exposure.dataId,
                                                                 collections=collections)
                if not wcsRefsIter:
                    raise LookupError(f"No raw.wcs datasets found for data ID {exposure.dataId} "
                                      f"in collections {collections}.")
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get("raw.wcs", dataId=exposure.dataId, detector=self.config.detectorId,
                                      collections=collections)
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds

    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions
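
# A configuration sketch for the region calculator above, assuming it is used
# through DefineVisitsTask. The field names come from
# _ComputeVisitRegionsFromSingleRawWcsConfig (and its base class); the values
# are illustrative only:
#
#     config = DefineVisitsConfig()
#     config.computeVisitRegions.name = "single-raw-wcs"
#     regionConfig = config.computeVisitRegions["single-raw-wcs"]
#     regionConfig.mergeExposures = True   # hull over all exposures in a visit
#     regionConfig.detectorId = 10         # pin the WCS to one detector
#     regionConfig.padding = 100           # pad bounding boxes by 100 pixels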