from __future__ import annotations
27 "GroupExposuresConfig",
29 "VisitDefinitionData",
import dataclasses
import itertools
from abc import ABCMeta, abstractmethod
from collections import defaultdict
from typing import Any, Dict, Iterable, List, Optional, Tuple
from multiprocessing import Pool

from lsst.daf.butler import (
    Butler,
    DataCoordinate,
    DataId,
    DimensionGraph,
    DimensionRecord,
    Timespan,
)
import lsst.geom
from lsst.geom import Box2D
from lsst.pex.config import Config, Field, makeRegistry, registerConfigurable
from lsst.afw.cameraGeom import FOCAL_PLANE, PIXELS
from lsst.pipe.base import Task
from lsst.sphgeom import ConvexPolygon, Region, UnitVector3d
from ._instrument import loadCamera, Instrument
@dataclasses.dataclass
class VisitDefinitionData:
    """Struct representing a group of exposures that will be used to define a
    visit.
    """

    instrument: str
    """Name of the instrument this visit will be associated with."""

    id: int
    """Integer ID of the visit.  This must be unique across all visit systems
    for the instrument.
    """

    name: str
    """String name for the visit.  This must be unique across all visit
    systems for the instrument.
    """

    exposures: List[DimensionRecord] = dataclasses.field(default_factory=list)
    """Dimension records for the exposures that are part of this visit."""
@dataclasses.dataclass
class _VisitRecords:
    """Struct containing the dimension records associated with a visit."""

    visit: DimensionRecord
    """Record for the 'visit' dimension itself."""

    visit_definition: List[DimensionRecord]
    """Records for 'visit_definition', which relates 'visit' to 'exposure'."""

    visit_detector_region: List[DimensionRecord]
    """Records for 'visit_detector_region', which associates the combination
    of a 'visit' and a 'detector' with a region on the sky.
    """


class GroupExposuresConfig(Config):
    pass
class GroupExposuresTask(Task, metaclass=ABCMeta):
    """Abstract base class for the subtask of `DefineVisitsTask` that is
    responsible for grouping exposures into visits.

    Subclasses should be registered with `GroupExposuresTask.registry` to
    enable use by `DefineVisitsTask`, and should generally correspond to a
    particular 'visit_system' dimension value.  They are also responsible for
    defining visit IDs and names that are unique across all visit systems in
    use by an instrument.

    Parameters
    ----------
    config : `GroupExposuresConfig`
        Configuration information.
    **kwargs
        Additional keyword arguments forwarded to the `Task` constructor.
    """
    def __init__(self, config: GroupExposuresConfig, **kwargs: Any):
        Task.__init__(self, config=config, **kwargs)

    ConfigClass = GroupExposuresConfig

    _DefaultName = "groupExposures"

    registry = makeRegistry(
        doc="Registry of algorithms for grouping exposures into visits.",
        configBaseType=GroupExposuresConfig,
    )
    @abstractmethod
    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        """Group the given exposures into visits.

        Parameters
        ----------
        exposures : `list` [ `DimensionRecord` ]
            DimensionRecords (for the 'exposure' dimension) describing the
            exposures to group.

        Returns
        -------
        visits : `Iterable` [ `VisitDefinitionData` ]
            Structs identifying the visits and the exposures associated with
            them.  This may be an iterator or a container.
        """
        raise NotImplementedError()

    @abstractmethod
    def getVisitSystem(self) -> Tuple[int, str]:
        """Return identifiers for the 'visit_system' dimension this algorithm
        implements.

        Returns
        -------
        id : `int`
            Integer ID for the visit system (given an instrument).
        name : `str`
            Unique string identifier for the visit system (given an
            instrument).
        """
        raise NotImplementedError()
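

# Example (an illustrative sketch only, not part of this module): a new
# grouping algorithm would be added by subclassing GroupExposuresTask and
# registering it by name, mirroring the concrete implementations further down
# in this file.  The ``_GroupByNightConfig``/``_GroupByNightTask`` names and
# the (3, "by-night") visit system are hypothetical.
#
#     class _GroupByNightConfig(GroupExposuresConfig):
#         pass
#
#     @registerConfigurable("by-night", GroupExposuresTask.registry)
#     class _GroupByNightTask(GroupExposuresTask):
#         ConfigClass = _GroupByNightConfig
#
#         def group(self, exposures):
#             ...  # yield one VisitDefinitionData per group of exposures
#
#         def getVisitSystem(self):
#             return (3, "by-night")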
class ComputeVisitRegionsConfig(Config):
    padding = Field(
        dtype=int, default=0,
        doc=("Pad raw image bounding boxes with specified number of pixels "
             "when calculating their (conservatively large) region on the "
             "sky."),
    )
181 """Abstract base class for the subtask of `DefineVisitsTask` that is
182 responsible for extracting spatial regions for visits and visit+detector
185 Subclasses should be registered with `ComputeVisitRegionsTask.registry` to
186 enable use by `DefineVisitsTask`.
190 config : `ComputeVisitRegionsConfig`
191 Configuration information.
192 butler : `lsst.daf.butler.Butler`
195 Additional keyword arguments forwarded to the `Task` constructor.
197 def __init__(self, config: ComputeVisitRegionsConfig, *, butler: Butler, **kwargs: Any):
198 Task.__init__(self, config=config, **kwargs)
202 ConfigClass = ComputeVisitRegionsConfig
204 _DefaultName =
"computeVisitRegions"
206 registry = makeRegistry(
207 doc=(
"Registry of algorithms for computing on-sky regions for visits "
208 "and visit+detector combinations."),
209 configBaseType=ComputeVisitRegionsConfig,
213 """Retrieve an `~lsst.obs.base.Instrument` associated with this
218 instrumentName : `str`
219 The name of the instrument.
223 instrument : `~lsst.obs.base.Instrument`
224 The associated instrument object.
228 The result is cached.
231 if instrument
is None:
232 instrument = Instrument.fromName(instrumentName, self.
butler.registry)
    @abstractmethod
    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        """Compute regions for the given visit and all detectors in that visit.

        Parameters
        ----------
        visit : `VisitDefinitionData`
            Struct describing the visit and the exposures associated with it.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        visitRegion : `lsst.sphgeom.Region`
            Region for the full visit.
        visitDetectorRegions : `dict` [ `int`, `lsst.sphgeom.Region` ]
            Dictionary mapping detector ID to the region for that detector.
            Should include all detectors in the visit.
        """
        raise NotImplementedError()
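

# For illustration (a minimal sketch under the assumptions stated here; the
# ``_NullRegions*`` names are hypothetical and the class is not registered):
# the smallest conforming ``compute`` implementation returns one region per
# detector plus a region for the whole visit.
#
#     class _NullRegionsConfig(ComputeVisitRegionsConfig):
#         pass
#
#     class _NullRegionsTask(ComputeVisitRegionsTask):
#         ConfigClass = _NullRegionsConfig
#
#         def compute(self, visit, *, collections=None):
#             # A single octant of the sphere, reused for every detector; the
#             # visit region is the convex hull of all detector regions (here
#             # they are all identical).
#             corners = [UnitVector3d(1, 0, 0), UnitVector3d(0, 1, 0),
#                        UnitVector3d(0, 0, 1)]
#             region = ConvexPolygon.convexHull(corners)
#             camera, _ = loadCamera(self.butler, visit.exposures[0].dataId,
#                                    collections=collections)
#             return region, {detector.getId(): region for detector in camera}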
class DefineVisitsConfig(Config):
    groupExposures = GroupExposuresTask.registry.makeField(
        doc="Algorithm for grouping exposures into visits.",
        default="one-to-one",
    )
    computeVisitRegions = ComputeVisitRegionsTask.registry.makeField(
        doc="Algorithm for computing visit and visit+detector regions.",
        default="single-raw-wcs",
    )
    ignoreNonScienceExposures = Field(
        doc=("If True, silently ignore input exposures that do not have "
             "observation_type=SCIENCE.  If False, raise an exception if one "
             "is encountered."),
        dtype=bool,
        default=True,
    )
282 """Driver Task for defining visits (and their spatial regions) in Gen3
287 config : `DefineVisitsConfig`
288 Configuration for the task.
289 butler : `~lsst.daf.butler.Butler`
290 Writeable butler instance. Will be used to read `raw.wcs` and `camera`
291 datasets and insert/sync dimension data.
293 Additional keyword arguments are forwarded to the `lsst.pipe.base.Task`
298 Each instance of `DefineVisitsTask` reads from / writes to the same Butler.
299 Each invocation of `DefineVisitsTask.run` processes an independent group of
300 exposures into one or more new vists, all belonging to the same visit
301 system and instrument.
303 The actual work of grouping exposures and computing regions is delegated
304 to pluggable subtasks (`GroupExposuresTask` and `ComputeVisitRegionsTask`),
305 respectively. The defaults are to create one visit for every exposure,
306 and to use exactly one (arbitrary) detector-level raw dataset's WCS along
307 with camera geometry to compute regions for all detectors. Other
308 implementations can be created and configured for instruments for which
309 these choices are unsuitable (e.g. because visits and exposures are not
310 one-to-one, or because ``raw.wcs`` datasets for different detectors may not
311 be consistent with camera geomery).
313 It is not necessary in general to ingest all raws for an exposure before
314 defining a visit that includes the exposure; this depends entirely on the
315 `ComputeVisitRegionTask` subclass used. For the default configuration,
316 a single raw for each exposure is sufficient.
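    # Example usage (an illustrative sketch; the repository path, instrument
    # name, and exposure IDs below are hypothetical placeholders):
    #
    #     from lsst.daf.butler import Butler
    #
    #     butler = Butler("/path/to/repo", writeable=True)
    #     task = DefineVisitsTask(config=DefineVisitsConfig(), butler=butler)
    #     task.run([
    #         {"instrument": "MyCam", "exposure": 1001},
    #         {"instrument": "MyCam", "exposure": 1002},
    #     ])
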
    def __init__(self, config: Optional[DefineVisitsConfig] = None, *, butler: Butler, **kwargs: Any):
        super().__init__(config, **kwargs)
        self.butler = butler
        self.universe = self.butler.registry.dimensions
        self.makeSubtask("groupExposures")
        self.makeSubtask("computeVisitRegions", butler=self.butler)
    @classmethod
    def _makeTask(cls, config: DefineVisitsConfig, butler: Butler, name: str, parentTask: Task):
        """Construct a DefineVisitsTask using only positional arguments.

        All parameters are as for `DefineVisitsTask`.
        """
        return cls(config=config, butler=butler, name=name, parentTask=parentTask)

    def __reduce__(self):
        """Support pickling via `_makeTask`.
        """
        return (self._makeTask, (self.config, self.butler, self._name, self._parentTask))

    ConfigClass = DefineVisitsConfig

    _DefaultName = "defineVisits"
    def _buildVisitRecords(self, definition: VisitDefinitionData, *,
                           collections: Any = None) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit.

        Parameters
        ----------
        definition : `VisitDefinitionData`
            Struct with identifiers for the visit and records for its
            constituent exposures.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        # Compute all regions.
        visitRegion, visitDetectorRegions = self.computeVisitRegions.compute(definition,
                                                                             collections=collections)
        # Aggregate the remaining per-exposure quantities.
        timespan = Timespan(
            begin=_reduceOrNone(min, (e.timespan.begin for e in definition.exposures)),
            end=_reduceOrNone(max, (e.timespan.end for e in definition.exposures)),
        )
        exposure_time = _reduceOrNone(sum, (e.exposure_time for e in definition.exposures))
        physical_filter = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.physical_filter for e in definition.exposures))
        target_name = _reduceOrNone(lambda a, b: a if a == b else None,
                                    (e.target_name for e in definition.exposures))
        science_program = _reduceOrNone(lambda a, b: a if a == b else None,
                                        (e.science_program for e in definition.exposures))
        observation_reason = _reduceOrNone(lambda a, b: a if a == b else None,
                                           (e.observation_reason for e in definition.exposures))
        if observation_reason is None:
            # Be explicit about there being multiple reasons.
            observation_reason = "various"
        # Use the mean zenith angle of the constituent exposures.
        zenith_angle = _reduceOrNone(sum, (e.zenith_angle for e in definition.exposures))
        if zenith_angle is not None:
            zenith_angle /= len(definition.exposures)
        # Construct the actual DimensionRecords.
        return _VisitRecords(
            visit=self.universe["visit"].RecordClass(
                instrument=definition.instrument,
                id=definition.id,
                name=definition.name,
                physical_filter=physical_filter,
                target_name=target_name,
                science_program=science_program,
                observation_reason=observation_reason,
                zenith_angle=zenith_angle,
                visit_system=self.groupExposures.getVisitSystem()[0],
                exposure_time=exposure_time,
                timespan=timespan,
                region=visitRegion,
            ),
            visit_definition=[
                self.universe["visit_definition"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    exposure=exposure.id,
                    visit_system=self.groupExposures.getVisitSystem()[0],
                )
                for exposure in definition.exposures
            ],
            visit_detector_region=[
                self.universe["visit_detector_region"].RecordClass(
                    instrument=definition.instrument,
                    visit=definition.id,
                    detector=detectorId,
                    region=detectorRegion,
                )
                for detectorId, detectorRegion in visitDetectorRegions.items()
            ],
        )
    def _expandExposureId(self, dataId: DataId) -> DataCoordinate:
        """Return the expanded version of an exposure data ID.

        A private method to allow ID expansion in a pool without resorting
        to local callables.

        Parameters
        ----------
        dataId : `dict` or `DataCoordinate`
            Exposure-level data ID.

        Returns
        -------
        expanded : `DataCoordinate`
            A data ID that includes full metadata for all exposure dimensions.
        """
        dimensions = DimensionGraph(self.universe, names=["exposure"])
        return self.butler.registry.expandDataId(dataId, graph=dimensions)
    def _buildVisitRecordsSingle(self, args) -> _VisitRecords:
        """Build the DimensionRecords associated with a visit and collection.

        A wrapper for `_buildVisitRecords` to allow it to be run as part of
        a pool without resorting to local callables.

        Parameters
        ----------
        args : `tuple` [ `VisitDefinitionData`, any ]
            A tuple consisting of the ``definition`` and ``collections``
            arguments to `_buildVisitRecords`, in that order.

        Returns
        -------
        records : `_VisitRecords`
            Struct containing DimensionRecords for the visit, including
            associated dimension elements.
        """
        return self._buildVisitRecords(args[0], collections=args[1])

    def run(self, dataIds: Iterable[DataId], *,
            pool: Optional[Pool] = None,
            processes: int = 1,
            collections: Optional[str] = None):
475 """Add visit definitions to the registry for the given exposures.
479 dataIds : `Iterable` [ `dict` or `DataCoordinate` ]
480 Exposure-level data IDs. These must all correspond to the same
481 instrument, and are expected to be on-sky science exposures.
482 pool : `multiprocessing.Pool`, optional
483 If not `None`, a process pool with which to parallelize some
485 processes : `int`, optional
486 The number of processes to use. Ignored if ``pool`` is not `None`.
487 collections : Any, optional
488 Collections to be searched for raws and camera geometry, overriding
489 ``self.butler.collections``.
490 Can be any of the types supported by the ``collections`` argument
491 to butler construction.
        if pool is None and processes > 1:
            pool = Pool(processes)
        mapFunc = map if pool is None else pool.imap_unordered
        # Normalize, expand, and deduplicate the input data IDs.
        self.log.info("Preprocessing data IDs.")
        dataIds = set(mapFunc(self._expandExposureId, dataIds))
        if not dataIds:
            raise RuntimeError("No exposures given.")
        # Extract exposure DimensionRecords, check that there's only one
        # instrument in play, and filter out non-science exposures.
        exposures = []
        instruments = set()
        for dataId in dataIds:
            record = dataId.records["exposure"]
            if record.observation_type != "science":
                if self.config.ignoreNonScienceExposures:
                    continue
                else:
                    raise RuntimeError(f"Input exposure {dataId} has observation_type "
                                       f"{record.observation_type}, not 'science'.")
            instruments.add(dataId["instrument"])
            exposures.append(record)
        if not exposures:
            self.log.info("No science exposures found after filtering.")
            return
        if len(instruments) > 1:
            raise RuntimeError(
                "All data IDs passed to DefineVisitsTask.run must be "
                f"from the same instrument; got {instruments}."
            )
        instrument, = instruments
        # Ensure the visit_system implemented by our grouping algorithm is
        # present in the registry, if it wasn't already.
        visitSystemId, visitSystemName = self.groupExposures.getVisitSystem()
        self.log.info("Registering visit_system %d: %s.", visitSystemId, visitSystemName)
        self.butler.registry.syncDimensionData(
            "visit_system",
            {"instrument": instrument, "id": visitSystemId, "name": visitSystemName}
        )
        # Group exposures into visits, delegating to the subtask.
        self.log.info("Grouping %d exposure(s) into visits.", len(exposures))
        definitions = list(self.groupExposures.group(exposures))
        # Compute regions and build the DimensionRecords for each visit; this
        # is the only parallelized step.
        self.log.info("Computing regions and other metadata for %d visit(s).", len(definitions))
        allRecords = mapFunc(self._buildVisitRecordsSingle,
                             zip(definitions, itertools.repeat(collections)))
        # Iterate over visits and insert dimension data, one transaction per
        # visit.
        for visitRecords in allRecords:
            with self.butler.registry.transaction():
                self.butler.registry.insertDimensionData("visit", visitRecords.visit)
                self.butler.registry.insertDimensionData("visit_definition",
                                                         *visitRecords.visit_definition)
                self.butler.registry.insertDimensionData("visit_detector_region",
                                                         *visitRecords.visit_detector_region)
def _reduceOrNone(func, iterable):
    """Apply a binary function to pairs of elements in an iterable until a
    single value is returned, but return `None` if any element is `None` or
    there are no elements.
    """
    r = None
    for v in iterable:
        if v is None:
            return None
        r = v if r is None else func(r, v)
    return r
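

# For example (illustrative values only): _reduceOrNone(min, [3, 1, 2]) == 1,
# while _reduceOrNone(min, [3, None, 2]) and _reduceOrNone(min, []) both
# return None.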
class _GroupExposuresOneToOneConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=0,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="one-to-one",
    )


@registerConfigurable("one-to-one", GroupExposuresTask.registry)
class _GroupExposuresOneToOneTask(GroupExposuresTask):
    """An exposure grouping algorithm that simply defines one visit for each
    exposure, reusing the exposure's identifiers for the visit.
    """

    ConfigClass = _GroupExposuresOneToOneConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        for exposure in exposures:
            yield VisitDefinitionData(
                instrument=exposure.instrument,
                id=exposure.id,
                name=exposure.name,
                exposures=[exposure],
            )

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)
class _GroupExposuresByGroupMetadataConfig(GroupExposuresConfig):
    visitSystemId = Field(
        doc=("Integer ID of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=int,
        default=1,
    )
    visitSystemName = Field(
        doc=("String name of the visit_system implemented by this grouping "
             "algorithm."),
        dtype=str,
        default="by-group-metadata",
    )


@registerConfigurable("by-group-metadata", GroupExposuresTask.registry)
class _GroupExposuresByGroupMetadataTask(GroupExposuresTask):
    """An exposure grouping algorithm that uses exposure.group_name and
    exposure.group_id.

    This algorithm _assumes_ exposure.group_id (generally populated from
    `astro_metadata_translator.ObservationInfo.visit_id`) is not just unique,
    but disjoint from all `ObservationInfo.exposure_id` values; if it isn't,
    it will be impossible to ever use both this grouping algorithm and the
    one-to-one algorithm for a particular camera in the same data repository.
    """

    ConfigClass = _GroupExposuresByGroupMetadataConfig

    def group(self, exposures: List[DimensionRecord]) -> Iterable[VisitDefinitionData]:
        # Docstring inherited from GroupExposuresTask.
        groups = defaultdict(list)
        for exposure in exposures:
            groups[exposure.group_name].append(exposure)
        for visitName, exposuresInGroup in groups.items():
            instrument = exposuresInGroup[0].instrument
            visitId = exposuresInGroup[0].group_id
            assert all(e.group_id == visitId for e in exposuresInGroup), \
                "Grouping by exposure.group_name does not yield consistent group IDs"
            yield VisitDefinitionData(instrument=instrument, id=visitId, name=visitName,
                                      exposures=exposuresInGroup)

    def getVisitSystem(self) -> Tuple[int, str]:
        # Docstring inherited from GroupExposuresTask.
        return (self.config.visitSystemId, self.config.visitSystemName)
class _ComputeVisitRegionsFromSingleRawWcsConfig(ComputeVisitRegionsConfig):
    mergeExposures = Field(
        doc=("If True, merge per-detector regions over all exposures in a "
             "visit (via convex hull) instead of using the first exposure and "
             "assuming its regions are valid for all others."),
        dtype=bool,
        default=False,
    )
    detectorId = Field(
        doc=("Load the WCS for the detector with this ID.  If None, use an "
             "arbitrary detector: the first found in a query of the data "
             "repository for each exposure (or all exposures, if "
             "mergeExposures is True)."),
        dtype=int,
        optional=True,
        default=None,
    )
    requireVersionedCamera = Field(
        doc=("If True, raise LookupError if versioned camera geometry cannot "
             "be loaded for an exposure.  If False, use the nominal camera "
             "from the Instrument class instead."),
        dtype=bool,
        default=False,
    )
@registerConfigurable("single-raw-wcs", ComputeVisitRegionsTask.registry)
class _ComputeVisitRegionsFromSingleRawWcsTask(ComputeVisitRegionsTask):
    """A visit region calculator that uses a single raw WCS and a camera to
    project the bounding boxes of all detectors onto the sky, relating
    different detectors by their positions in focal plane coordinates.

    Notes
    -----
    Most instruments should have their raw WCSs determined from a combination
    of boresight angle, rotator angle, and camera geometry, and hence this
    algorithm should produce stable results regardless of which detector the
    raw corresponds to.  If this is not the case (e.g. because a per-file FITS
    WCS is used instead), either the ID of the detector should be fixed (see
    the ``detectorId`` config parameter) or a different algorithm used.
    """

    ConfigClass = _ComputeVisitRegionsFromSingleRawWcsConfig
    def computeExposureBounds(self, exposure: DimensionRecord, *, collections: Any = None
                              ) -> Dict[int, List[UnitVector3d]]:
        """Compute the lists of unit vectors on the sphere that correspond to
        the sky positions of detector corners.

        Parameters
        ----------
        exposure : `DimensionRecord`
            Dimension record for the exposure.
        collections : Any, optional
            Collections to be searched for raws and camera geometry, overriding
            ``self.butler.collections``.
            Can be any of the types supported by the ``collections`` argument
            to butler construction.

        Returns
        -------
        bounds : `dict` [ `int`, `list` [ `lsst.sphgeom.UnitVector3d` ] ]
            Dictionary mapping detector ID to a list of unit vectors on the
            sphere representing that detector's corners projected onto the sky.
        """
        if collections is None:
            collections = self.butler.collections
        camera, versioned = loadCamera(self.butler, exposure.dataId, collections=collections)
        if not versioned and self.config.requireVersionedCamera:
            raise LookupError(f"No versioned camera found for exposure {exposure.dataId}.")
        # Prefer deriving the WCS from boresight information in the exposure
        # record; fall back to a raw.wcs dataset if those fields are missing.
        use_registry = True
        try:
            orientation = lsst.geom.Angle(exposure.sky_angle, lsst.geom.degrees)
            radec = lsst.geom.SpherePoint(lsst.geom.Angle(exposure.tracking_ra, lsst.geom.degrees),
                                          lsst.geom.Angle(exposure.tracking_dec, lsst.geom.degrees))
        except AttributeError:
            use_registry = False
        if use_registry:
            if self.config.detectorId is None:
                detectorId = next(camera.getIdIter())
            else:
                detectorId = self.config.detectorId
            wcsDetector = camera[detectorId]
            # Ask the raw formatter to create the WCS so that any flips are
            # taken into account.
            instrument = self.getInstrument(exposure.instrument)
            rawFormatter = instrument.getRawFormatter({"detector": detectorId})
            wcs = rawFormatter.makeRawSkyWcsFromBoresight(radec, orientation, wcsDetector)
        else:
            if self.config.detectorId is None:
                wcsRefsIter = self.butler.registry.queryDatasets("raw.wcs", dataId=exposure.dataId,
                                                                 collections=collections)
                if not wcsRefsIter:
                    raise LookupError(f"No raw.wcs datasets found for data ID {exposure.dataId} "
                                      f"in collections {collections}.")
                wcsRef = next(iter(wcsRefsIter))
                wcsDetector = camera[wcsRef.dataId["detector"]]
                wcs = self.butler.getDirect(wcsRef)
            else:
                wcsDetector = camera[self.config.detectorId]
                wcs = self.butler.get("raw.wcs", dataId=exposure.dataId, detector=self.config.detectorId,
                                      collections=collections)
        fpToSky = wcsDetector.getTransform(FOCAL_PLANE, PIXELS).then(wcs.getTransform())
        bounds = {}
        for detector in camera:
            pixelsToSky = detector.getTransform(PIXELS, FOCAL_PLANE).then(fpToSky)
            pixCorners = Box2D(detector.getBBox().dilatedBy(self.config.padding)).getCorners()
            bounds[detector.getId()] = [
                skyCorner.getVector() for skyCorner in pixelsToSky.applyForward(pixCorners)
            ]
        return bounds
    def compute(self, visit: VisitDefinitionData, *, collections: Any = None
                ) -> Tuple[Region, Dict[int, Region]]:
        # Docstring inherited from ComputeVisitRegionsTask.
        if self.config.mergeExposures:
            detectorBounds = defaultdict(list)
            for exposure in visit.exposures:
                exposureDetectorBounds = self.computeExposureBounds(exposure, collections=collections)
                for detectorId, bounds in exposureDetectorBounds.items():
                    detectorBounds[detectorId].extend(bounds)
        else:
            detectorBounds = self.computeExposureBounds(visit.exposures[0], collections=collections)
        visitBounds = []
        detectorRegions = {}
        for detectorId, bounds in detectorBounds.items():
            detectorRegions[detectorId] = ConvexPolygon.convexHull(bounds)
            visitBounds.extend(bounds)
        return ConvexPolygon.convexHull(visitBounds), detectorRegions