Coverage for python/lsst/obs/base/defineVisits.py : 30%

# This file is part of obs_base.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = [
    "DefineVisitsConfig",
    "DefineVisitsTask",
    "GroupExposuresConfig",
    "GroupExposuresTask",
    "VisitDefinitionData",
]

from abc import ABCMeta, abstractmethod
from collections import defaultdict
import dataclasses
from typing import Any, Dict, Iterable, List, Optional, Tuple
from multiprocessing import Pool

from lsst.daf.butler import (
    Butler,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Timespan,
    TIMESPAN_FIELD_SPECS,
)

import lsst.geom
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d
from ._instrument import loadCamera, Instrument


@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with.
    """

    id: int
    """Integer ID of the visit.

    This must be unique across all visit systems for the instrument.
    """

    name: str
    """String name for the visit.

    This must be unique across all visit systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit.
    """


@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit.
    """

    visit: DimensionRecord
    """Record for the 'visit' dimension itself.
    """

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'.
    """

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass


class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value. They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )

    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them. This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this
        algorithm implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()


class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int,
        default=0,
        doc=("Pad raw image bounding boxes with specified number of pixels "
             "when calculating their (conservatively large) region on the "
             "sky."),
    )


class ComputeVisitRegionsTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for extracting spatial regions for visits and visit+detector
    combinations.

    Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
    enable use by `DefineVisitsTask`.

    Parameters
    ----------
    config : `ComputeVisitRegionsConfig`
        Configuration information.
    butler : `lsst.daf.butler.Butler`
        The butler to use.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)
        self.butler = butler
        self.instrumentMap = {}

    ConfigClass = ComputeVisitRegionsConfig

    _DefaultName = "computeVisitRegions"

    registry = makeRegistry(
        doc=("Registry of algorithms for computing on-sky regions for visits "
             "and visit+detector combinations."),
        configBaseType=ComputeVisitRegionsConfig,
    )

    def getInstrument(self, instrumentName) -> Instrument:
        """Retrieve an `~lsst.obs.base.Instrument` associated with this
        instrument name.

        Parameters
        ----------
        instrumentName : `str`
            The name of the instrument.

        Returns
        -------
        instrument : `~lsst.obs.base.Instrument`
            The associated instrument object.

        Notes
        -----
        The result is cached.
        """
        instrument = self.instrumentMap.get(instrumentName)
        if instrument is None:
            instrument = Instrument.fromName(instrumentName, self.butler.registry)
            self.instrumentMap[instrumentName] = instrument
        return instrument

    @abstractmethod
    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()


class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=("If True, silently ignore input exposures that do not have "
             "observation_type=SCIENCE. If False, raise an exception if one "
             "is encountered."),
        dtype=bool,
        optional=False,
        default=True,
    )


class DefineVisitsTask(Task):
    """Driver Task for defining visits (and their spatial regions) in Gen3
    Butler repositories.

    Parameters
    ----------
    config : `DefineVisitsConfig`
        Configuration for the task.
    butler : `~lsst.daf.butler.Butler`
        Writeable butler instance. Will be used to read `raw.wcs` and `camera`
        datasets and insert/sync dimension data.
    **kwargs
        Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
        constructor.

    Notes
    -----
    Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
    Each invocation of `DefineVisitsTask.run` processes an independent group of
    exposures into one or more new visits, all belonging to the same visit
    system and instrument.

    The actual work of grouping exposures and computing regions is delegated
    to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
    respectively. The defaults are to create one visit for every exposure,
    and to use exactly one (arbitrary) detector-level raw dataset's WCS along
    with camera geometry to compute regions for all detectors. Other
    implementations can be created and configured for instruments for which
    these choices are unsuitable (e.g. because visits and exposures are not
    one-to-one, or because ``raw.wcs`` datasets for different detectors may not
    be consistent with camera geometry).

    It is not necessary in general to ingest all raws for an exposure before
    defining a visit that includes the exposure; this depends entirely on the
    `ComputeVisitRegionsTask` subclass used. For the default configuration,
    a single raw for each exposure is sufficient.
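
    A minimal usage sketch (the repository path and data ID below are
    hypothetical, and exact `~lsst.daf.butler.Butler` construction arguments
    may differ)::

        butler = Butler("/path/to/repo", writeable=True)
        task = DefineVisitsTask(config=DefineVisitsConfig(), butler=butler)
        task.run([{"instrument": "HSC", "exposure": 12345}])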
316 """
    def __init__(self, config: Optional[DefineVisitsConfig] = None, *, butler: Butler, **kwargs: Any):
        config.validate()  # Not a CmdlineTask nor PipelineTask, so have to validate the config here.
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)

    ConfigClass = DefineVisitsConfig

    _DefaultName = "defineVisits"

    def _buildVisitRecords(self, definition: VisitDefinitionData, *,
                           collections: Any = None) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(definition,
                                                                              collections=collections)
        # Aggregate other exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(sum, (e.exposure_time for e in definition.exposures))
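        # For the fields below, keep the value only when it is identical
        # across all exposures in the visit; otherwise record None.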
        physical_filter = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(lambda a, b: a if a == b else None,
                                    (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.science_program for e in definition.exposures))

        # Use the mean zenith angle as an approximation
        zenith_angle = _reduceOrNone(sum, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)

        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass.fromDict({
                "instrument": definition.instrument,
                "id": definition.id,
                "name": definition.name,
                "physical_filter": physical_filter,
                "target_name": target_name,
                "science_program": science_program,
                "zenith_angle": zenith_angle,
                "visit_system": self.groupExposures.getVisitSystem()[0],
                "exposure_time": exposure_time,
                TIMESPAN_FIELD_SPECS.begin.name: timespan.begin,
                TIMESPAN_FIELD_SPECS.end.name: timespan.end,
                "region": visitRegion,
                # TODO: no seeing value in exposure dimension records, so we
                # can't set that here. But there are many other columns that
                # both dimensions should probably have as well.
            }),
            visit_definition=[
                self.universe["visit_definition"].RecordClass.fromDict({
                    "instrument": definition.instrument,
                    "visit": definition.id,
                    "exposure": exposure.id,
                    "visit_system": self.groupExposures.getVisitSystem()[0],
                })
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass.fromDict({
                    "instrument": definition.instrument,
                    "visit": definition.id,
                    "detector": detectorId,
                    "region": detectorRegion,
                })
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ]
        )

    def run(self, dataIds: Iterable[DataId], *,
            pool: Optional[Pool] = None,
            processes: int = 1,
            collections: Optional[str] = None):
        """Add visit definitions to the registry for the given exposures.

        Parameters
        ----------
        dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
            Exposure-level data IDs. These must all correspond to the same
            instrument, and are expected to be on-sky science exposures.
        pool : `multiprocessing.Pool`, optional
            If not `None`, a process pool with which to parallelize some
            operations.
        processes : `int`, optional
            The number of processes to use. Ignored if ``pool`` is not `None`.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.
        """
        # Set up multiprocessing, if desired.
        if pool is None and processes > 1:
            pool = Pool(processes)
        mapFunc = map if pool is None else pool.imap_unordered
        # Normalize, expand, and deduplicate data IDs.
        self.log.info("Preprocessing data IDs.")
        dimensions = DimensionGraph(self.universe, names=["exposure"])
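        # Expanding each data ID attaches the full 'exposure' dimension
        # record, which is what ``dataId.records["exposure"]`` reads below.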
        dataIds = set(mapFunc(lambda d: self.butler.registry.expandDataId(d, graph=dimensions), dataIds))
        if not dataIds:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and check for non-science exposures.
        exposures = []
        instruments = set()
        for dataId in dataIds:
            record = dataId.records["exposure"]
            if record.observation_type != "science":
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(f"Input exposure {dataId} has observation_type "
                                       f"{record.observation_type}, not 'science'.")
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No science exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                f"All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        instrument, = instruments
        # Ensure the visit_system our grouping algorithm uses is in the
        # registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system",
            {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Compute regions and build DimensionRecords for each visit.
        # This is the only parallel step, but it _should_ be the most expensive
        # one (unless DB operations are slow).
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        allRecords = mapFunc(lambda d: self._buildVisitRecords(d, collections=collections), definitions)
        # Iterate over visits and insert dimension data, one transaction per
        # visit.
        for visitRecords in allRecords:
            with self.butler.registry.transaction():
                self.butler.registry.insertDimensionData("visit", visitRecords.visit)
                self.butler.registry.insertDimensionData("visit_definition",
                                                         *visitRecords.visit_definition)
                self.butler.registry.insertDimensionData("visit_detector_region",
                                                         *visitRecords.visit_detector_region)


def _reduceOrNone(func, iterable):
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
    """
    r = None
    for v in iterable:
        if v is None:
            return None
        if r is None:
            r = v
        else:
            r = func(r, v)
    return r


class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.name,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask, metaclass=ABCMeta):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values - if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(e.group_id == visitId for e in exposuresInGroup), \
                "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(instrument=instrument, id=visitId, name=visitName,
                                      exposures=exposuresInGroup)

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)


class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=("If True, merge per-detector regions over all exposures in a "
             "visit (via convex hull) instead of using the first exposure and "
             "assuming its regions are valid for all others."),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=("Load the WCS for the detector with this ID. If None, use an "
             "arbitrary detector (the first found in a query of the data "
             "repository for each exposure, or for all exposures if "
             "mergeExposures is True)."),
        dtype=int,
        optional=True,
        default=None
    )
    requireVersionedCamera = Field(
        doc=("If True, raise LookupError if versioned camera geometry cannot "
             "be loaded for an exposure. If False, use the nominal camera "
             "from the Instrument class instead."),
        dtype=bool,
        optional=False,
        default=False,
    )


@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to. If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig

    def computeExposureBounds(self, exposure: DimensionRecord, *, collections: Any = None
                              ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict`
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")

        # Derive WCS from boresight information -- if available in registry
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                                          lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees))
        except AttributeError:
            use_registry = False

        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]

            # Ask the raw formatter to create the relevant WCS
            # This allows flips to be taken into account
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)

        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets("raw.wcs", dataId=exposure.dataId,
                                                                 collections=collections)
                if not wcsRefsIter:
                    raise LookupError(f"No raw.wcs datasets found for data ID {exposure.dataId} "
                                      f"in collections {collections}.")
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get("raw.wcs", dataId=exposure.dataId, detector=self.config.detectorId,
                                      collections=collections)
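        # The WCS above maps the reference detector's pixels to the sky, so
        # composing FOCAL_PLANE -> PIXELS (for that detector) with the WCS
        # transform yields a focal-plane-to-sky mapping that can be applied
        # to every detector in the camera.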
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds

    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
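        # Each detector region is the convex hull of its accumulated corners;
        # the visit region is the hull over all detectors' corners, so it
        # conservatively covers the full focal-plane footprint (including any
        # dithers when mergeExposures is enabled).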
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions