# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

from abc import ABCMeta, abstractmethod
from collections import defaultdict
import itertools
import dataclasses
from typing import Any, Dict, Iterable, List, Optional, Tuple
from multiprocessing import Pool

from lsst.daf.butler import (
    Butler,
    DataCoordinate,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Progress,
    Timespan,
)

import lsst.geom
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d
from ._instrument import loadCamera, Instrument


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """


@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit.
    """

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value. They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )
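
    # Implementations are added to ``registry`` with
    # `lsst.pex.config.registerConfigurable`, as the grouping algorithms at
    # the end of this module are. An illustrative (hypothetical) registration:
    #
    #     @registerConfigurable("my-grouping", GroupExposuresTask.registry)
    #     class MyGroupExposuresTask(GroupExposuresTask):
    #         ConfigClass = MyGroupExposuresConfig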

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them. This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()


class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=250,
        doc=("Pad raw image bounding boxes with specified number of pixels "
             "when calculating their (conservatively large) region on the "
             "sky. Note that the config value for pixelMargin of the "
             "reference object loaders in meas_algorithms should be <= "
             "the value set here."),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=("Registry of algorithms for computing on-sky regions for visits "
             "and visit+detector combinations."),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=("If True, silently ignore input exposures that do not have "
             "observation_type=SCIENCE. If False, raise an exception if one "
             "is encountered."),
        dtype=bool,
        optional=False,
        default=True,
    )


class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance. Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively. The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors. Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used. For the default configuration,
    a single raw for each exposure is sufficient.

    Defining the same visit the same way multiple times (e.g. via multiple
    invocations of this task on the same exposures, with the same
    configuration) is safe, but it may be inefficient, as most of the work must
    be done before new visits can be compared to existing visits.
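
    Examples
    --------
    A minimal usage sketch; the repository path, instrument name, and data ID
    query below are hypothetical placeholders::

        butler = Butler("/path/to/repo", writeable=True)
        task = DefineVisitsTask(config=DefineVisitsConfig(), butler=butler)
        task.run(butler.registry.queryDataIds(["exposure"], instrument="MyCam"))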
325 """
326 def __init__(self, config: Optional[DefineVisitsConfig] = None, *, butler: Butler, **kwargs: Any):
327 config.validate() # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
328 super().__init__(config, **kwargs)
329 self.butler = butler
330 self.universe = self.butler.registry.dimensions
331 self.progress = Progress("obs.base.DefineVisitsTask")
332 self.makeSubtask("groupExposures")
333 self.makeSubtask("computeVisitRegions", butler=self.butler)
335 def _reduce_kwargs(self):
336 # Add extra parameters to pickle
337 return dict(**super()._reduce_kwargs(), butler=self.butler)
339 ConfigClass = DefineVisitsConfig
341 _DefaultName = "defineVisits"

    def _buildVisitRecords(self, definition: VisitDefinitionData, *,
                           collections: Any = None) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(definition,
                                                                             collections=collections)
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(sum, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(lambda a, b: a if a == b else None,
                                    (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.science_program for e in definition.exposures))

        # observing day for a visit is defined by the earliest observation
        # of the visit
        observing_day = _reduceOrNone(min, (e.day_obs for e in definition.exposures))
        observation_reason = _reduceOrNone(lambda a, b: a if a == b else None,
                                           (e.observation_reason for e in definition.exposures))
        if observation_reason is None:
            # Be explicit about there being multiple reasons
            observation_reason = "various"

        # Use the mean zenith angle as an approximation
        zenith_angle = _reduceOrNone(sum, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                day_obs=observing_day,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ]
        )

    def _expandExposureId(self, dataId: DataId) -> DataCoordinate:
        """Return the expanded version of an exposure ID.

        A private method to allow ID expansion in a pool without resorting
        to local callables.

        Parameters
        ----------
        dataId : `dict` or `DataCoordinate`
            Exposure-level data ID.

        Returns
        -------
        expanded : `DataCoordinate`
            A data ID that includes full metadata for all exposure dimensions.
        """
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        return self.butler.registry.expandDataId(dataId, graph=dimensions)

    def _buildVisitRecordsSingle(self, args) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit and collection.

        A wrapper for `_buildVisitRecords` to allow it to be run as part of
        a pool without resorting to local callables.

        Parameters
        ----------
        args : `tuple` [`VisitDefinitionData`, any]
            A tuple consisting of the ``definition`` and ``collections``
            arguments to `_buildVisitRecords`, in that order.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        return self._buildVisitRecords(args[0], collections=args[1])

    def run(self, dataIds: Iterable[DataId], *,
            pool: Optional[Pool] = None,
            processes: int = 1,
            collections: Optional[str] = None):
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs. These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        pool : `multiprocessing.Pool`, optional
            If not `None`, a process pool with which to parallelize some
            operations.
        processes : `int`, optional
            The number of processes to use. Ignored if ``pool`` is not `None`.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Raises
        ------
        lsst.daf.butler.registry.ConflictingDefinitionError
            Raised if a visit ID conflict is detected and the existing visit
            differs from the new one.
        """
        # Set up multiprocessing, if desired.
        if pool is None and processes > 1:
            pool = Pool(processes)
        mapFunc = map if pool is None else pool.imap_unordered
        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dataIds = set(mapFunc(self._expandExposureId, dataIds))
        if not dataIds:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in dataIds:
            record = dataId.records["exposure"]
            if record.observation_type != "science":
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(f"Input exposure {dataId} has observation_type "
                                       f"{record.observation_type}, not 'science'.")
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No science exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                f"All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        instrument, = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system",
            {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Compute regions and build DimensionRecords for each visit.
        # This is the only parallel step, but it _should_ be the most expensive
        # one (unless DB operations are slow).
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        allRecords = mapFunc(self._buildVisitRecordsSingle,
                             zip(definitions, itertools.repeat(collections)))
        # Iterate over visits and insert dimension data, one transaction per
        # visit. If a visit already exists, we skip all other inserts.
        for visitRecords in self.progress.wrap(allRecords, total=len(definitions),
                                               desc="Computing regions and inserting visits"):
            with self.butler.registry.transaction():
                if self.butler.registry.syncDimensionData("visit", visitRecords.visit):
                    self.butler.registry.insertDimensionData("visit_definition",
                                                             *visitRecords.visit_definition)
                    self.butler.registry.insertDimensionData("visit_detector_region",
                                                             *visitRecords.visit_detector_region)


def _reduceOrNone(func, iterable):
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
    """
    r = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r


class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.obs_id,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(e.group_id == visitId for e in exposuresInGroup), \
                "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(instrument=instrument, id=visitId, name=visitName,
                                      exposures=exposuresInGroup)

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=("If True, merge per-detector regions over all exposures in a "
             "visit (via convex hull) instead of using the first exposure and "
             "assuming its regions are valid for all others."),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=("Load the WCS for the detector with this ID. If None, use an "
             "arbitrary detector (the first found in a query of the data "
             "repository for each exposure, or for all exposures if "
             "mergeExposures is True)."),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=("If True, raise LookupError if versioned camera geometry cannot "
             "be loaded for an exposure. If False, use the nominal camera "
             "from the Instrument class instead."),
        dtype=bool,
        optional=False,
        default=False,
    )
690@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
691class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
692 """A visit region calculator that uses a single raw WCS and a camera to
693 project the bounding boxes of all detectors onto the sky, relating
694 different detectors by their positions in focal plane coordinates.
696 Notes
697 -----
698 Most instruments should have their raw WCSs determined from a combination
699 of boresight angle, rotator angle, and camera geometry, and hence this
700 algorithm should produce stable results regardless of which detector the
701 raw corresponds to. If this is not the case (e.g. because a per-file FITS
702 WCS is used instead), either the ID of the detector should be fixed (see
703 the ``detectorId`` config parameter) or a different algorithm used.
704 """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(self, exposure: DimensionRecord, *, collections: Any = None
                              ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                                          lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees))
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS
            # This allows flips to be taken into account
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)

        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets("raw.wcs", dataId=exposure.dataId,
                                                                 collections=collections)
                if not wcsRefsIter:
                    raise LookupError(f"No raw.wcs datasets found for data ID {exposure.dataId} "
                                      f"in collections {collections}.")
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get("raw.wcs", dataId=exposure.dataId, detector=self.config.detectorId,
                                      collections=collections)
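        # Compose focal plane -> pixels for the detector whose WCS was just
        # obtained with that WCS's pixels -> sky mapping, giving a focal
        # plane -> sky transform that is reused for every detector below.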
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds

    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions