from __future__ import annotations

__all__ = ["ConvertRepoConfig", "ConvertRepoTask", "ConvertRepoSkyMapConfig"]

import os
import fnmatch

from dataclasses import dataclass
from typing import Iterable, Optional, List, Dict

from lsst.utils import doImport
from lsst.daf.butler import (
    Butler as Butler3,
    SkyPixDimension,
)
from lsst.pex.config import Config, ConfigurableField, ConfigDictField, DictField, ListField, Field
from lsst.pipe.base import Task
from lsst.skymap import skyMapRegistry, BaseSkyMap

from ..ingest import RawIngestTask
from .repoConverter import ConversionSubset
from .rootRepoConverter import RootRepoConverter
from .calibRepoConverter import CalibRepoConverter
from .standardRepoConverter import StandardRepoConverter
48 """Struct containing information about a skymap that may appear in a Gen2 53 """Name of the skymap used in Gen3 data IDs. 57 """Hash computed by `BaseSkyMap.getSha1`. 61 """Name of the skymap used in Gen3 data IDs. 65 """Whether this skymap has been found in at least one repository being 71 """Sub-config used to hold the parameters of a SkyMap. 75 This config only needs to exist because we can't put a 76 `~lsst.pex.config.RegistryField` directly inside a 77 `~lsst.pex.config.ConfigDictField`. 79 It needs to have its only field named "skyMap" for compatibility with the 80 configuration of `lsst.pipe.tasks.MakeSkyMapTask`, which we want so we can 81 use one config file in an obs package to configure both. 83 This name leads to unfortunate repetition with the field named 84 "skymap" that holds it - "skyMap[name].skyMap" - but that seems 87 skyMap = skyMapRegistry.makeField(
88 doc=
"Type and parameters for the SkyMap itself.",


class ConvertRepoConfig(Config):
    raws = ConfigurableField(
        "Configuration for subtask responsible for ingesting raws and adding "
        "visit and exposure dimension entries.",
        target=RawIngestTask,
    )
    skyMaps = ConfigDictField(
        "Mapping from Gen3 skymap name to the parameters used to construct a "
        "BaseSkyMap instance. This will be used to associate names with "
        "existing skymaps found in the Gen2 repo.",
        keytype=str,
        itemtype=ConvertRepoSkyMapConfig,
        default={},
    )
    rootSkyMapName = Field(
        "Name of a Gen3 skymap (an entry in ``self.skyMaps``) to assume for "
        "datasets in the root repository when no SkyMap is found there.",
        dtype=str,
        optional=True,
        default=None,
    )
    collections = DictField(
        "Special collections (values) for certain dataset types (keys). "
        "These are used in addition to rerun collections for datasets in "
        "reruns. The 'raw' dataset must have an entry here if it is to be "
        "converted.",
        keytype=str,
        itemtype=str,
        default={
            "deepCoadd_skyMap": "skymaps",
            "brightObjectMask": "masks",
        },
    )
    storageClasses = DictField(
        "Mapping from dataset type name or Gen2 policy entry (e.g. 'python' "
        "or 'persistable') to the Gen3 StorageClass name.",
        keytype=str,
        itemtype=str,
        default={
            "BaseSkyMap": "SkyMap",
            "BaseCatalog": "Catalog",
            "BackgroundList": "Background",
            "MultilevelParquetTable": "DataFrame",
        },
    )
    doRegisterInstrument = Field(
        "If True (default), add dimension records for the Instrument and its "
        "filters and detectors to the registry instead of assuming they are "
        "already present.",
        dtype=bool,
        default=True,
    )
    doWriteCuratedCalibrations = Field(
        "If True (default), ingest human-curated calibrations directly via "
        "the Instrument interface. Note that these calibrations are never "
        "converted from Gen2 repositories.",
        dtype=bool,
        default=True,
    )
154 "The names of reference catalogs (subdirectories under ref_cats) to " 159 fileIgnorePatterns = ListField(
160 "Filename globs that should be ignored instead of being treated as " 163 default=[
"README.txt",
"*~?",
"butler.yaml",
"gen3.sqlite3",
164 "registry.sqlite3",
"calibRegistry.sqlite3",
"_mapper",
165 "_parent",
"repositoryCfg.yaml"]
    datasetIncludePatterns = ListField(
        "Glob-style patterns for dataset type names that should be converted.",
        dtype=str,
        default=["*"],
    )
    datasetIgnorePatterns = ListField(
        "Glob-style patterns for dataset type names that should not be "
        "converted despite matching a pattern in datasetIncludePatterns.",
        dtype=str,
        default=[],
    )
179 "Key used for the Gen2 equivalent of 'detector' in data IDs.",
184 "If True (default), only convert datasets that are related to the " 185 "ingested visits. Ignored unless a list of visits is passed to " 193 return self.
raws.transfer
197 self.
raws.transfer = value
201 return self.
raws.instrument
205 self.
raws.instrument = value
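
    # Illustrative sketch of the forwarding above: setting `transfer` or
    # `instrument` on a ConvertRepoConfig writes through to the raw-ingest
    # subtask config, so both read the same value. The instrument class name
    # below is hypothetical:
    #
    #     config = ConvertRepoConfig()
    #     config.instrument = "lsst.obs.example.ExampleInstrument"
    #     assert config.raws.instrument == "lsst.obs.example.ExampleInstrument"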
215 """A task that converts one or more related Gen2 data repositories to a 216 single Gen3 data repository (with multiple collections). 220 config: `ConvertRepoConfig` 221 Configuration for this task. 222 butler3: `lsst.daf.butler.Butler` 223 Gen3 Butler instance that represents the data repository datasets will 224 be ingested into. The collection and/or run associated with this 225 Butler will be ignored in favor of collections/runs passed via config 228 Other keyword arguments are forwarded to the `Task` constructor. 232 Most of the work of converting repositories is delegated to instances of 233 the `RepoConverter` hierarchy. The `ConvertRepoTask` instance itself holds 234 only state that is relevant for all Gen2 repositories being ingested, while 235 each `RepoConverter` instance holds only state relevant for the conversion 236 of a single Gen2 repository. Both the task and the `RepoConverter` 237 instances are single use; `ConvertRepoTask.run` and most `RepoConverter` 238 methods may only be called once on a particular instance. 241 ConfigClass = ConvertRepoConfig
243 _DefaultName =
"convertRepo" 245 def __init__(self, config=None, *, butler3: Butler3, **kwds):
251 self.makeSubtask(
"raws", butler=butler3)
255 self.
instrument = doImport(self.config.instrument)()
258 for name, config
in self.config.skyMaps.items():
259 instance = config.skyMap.apply()
260 struct =
ConfiguredSkyMap(name=name, sha1=instance.getSha1(), instance=instance)
266 """Return `True` if configuration indicates that the given dataset type 269 This method is intended to be called primarily by the 270 `RepoConverter` instances used interally by the task. 275 Name of the dataset type. 280 Whether the dataset should be included in the conversion. 283 any(fnmatch.fnmatchcase(datasetTypeName, pattern)
284 for pattern
in self.config.datasetIncludePatterns)
and 285 not any(fnmatch.fnmatchcase(datasetTypeName, pattern)
286 for pattern
in self.config.datasetIgnorePatterns)
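
    # Illustrative sketch of how the include/ignore patterns combine for a
    # task constructed with this config (the pattern values are hypothetical):
    #
    #     config.datasetIncludePatterns = ["*"]
    #     config.datasetIgnorePatterns = ["*_camera"]
    #     task.isDatasetTypeIncluded("calexp")      # True
    #     task.isDatasetTypeIncluded("raw_camera")  # False: ignore vetoes include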
290 """Indicate that a repository uses the given SkyMap. 292 This method is intended to be called primarily by the 293 `RepoConverter` instances used interally by the task. 297 skyMap : `lsst.skymap.BaseSkyMap` 298 SkyMap instance being used, typically retrieved from a Gen2 304 The name of the skymap in Gen3 data IDs. 306 sha1 = skyMap.getSha1()
309 except KeyError
as err:
310 raise LookupError(f
"SkyMap with sha1={sha1} not included in configuration.")
from err
315 """Register all skymaps that have been marked as used. 317 This method is intended to be called primarily by the 318 `RepoConverter` instances used interally by the task. 322 subset : `ConversionSubset`, optional 323 Object that will be used to filter converted datasets by data ID. 324 If given, it will be updated with the tracts of this skymap that 325 overlap the visits in the subset. 329 struct.instance.register(struct.name, self.
registry)
330 if subset
is not None and self.config.relatedOnly:
331 subset.addSkyMap(self.
registry, struct.name)
334 """Indicate that a repository uses the given SkyPix dimension. 336 This method is intended to be called primarily by the 337 `RepoConverter` instances used interally by the task. 341 dimension : `lsst.daf.butler.SkyPixDimension` 342 Dimension represening a pixelization of the sky. 347 """Register all skymaps that have been marked as used. 349 This method is intended to be called primarily by the 350 `RepoConverter` instances used interally by the task. 354 subset : `ConversionSubset`, optional 355 Object that will be used to filter converted datasets by data ID. 356 If given, it will be updated with the pixelization IDs that 357 overlap the visits in the subset. 359 if subset
is not None and self.config.relatedOnly:
361 subset.addSkyPix(self.
registry, dimension)

    def run(self, root: str, collections: List[str], *,
            calibs: Dict[str, List[str]] = None,
            reruns: Dict[str, List[str]] = None,
            visits: Optional[Iterable[int]] = None):
        """Convert a group of related data repositories.

        Parameters
        ----------
        root : `str`
            Complete path to the root Gen2 data repository. This should be
            a data repository that includes a Gen2 registry and any raw files
            and/or reference catalogs.
        collections : `list` of `str`
            Gen3 collections that datasets from the root repository should be
            associated with. This should include any rerun collection that
            these datasets should also be considered to be part of; because of
            structural difference between Gen2 parent/child relationships and
            Gen3 collections, these cannot be reliably inferred.
        calibs : `dict`, optional
            Dictionary mapping calibration repository path to the collections
            that the repository's datasets should be associated with. The path
            may be relative to ``root`` or absolute. Collections should
            include child repository collections as appropriate (see
            documentation for ``collections``).
        reruns : `dict`, optional
            Dictionary mapping rerun repository path to the collections that
            the repository's datasets should be associated with. The path may
            be relative to ``root`` or absolute. Collections should include
            child repository collections as appropriate (see documentation for
            ``collections``).
        visits : iterable of `int`, optional
            The integer IDs of visits to convert. If not provided, all visits
            in the Gen2 root repository will be converted.
        """
        if calibs is None:
            calibs = {}
        if reruns is None:
            reruns = {}
        if visits is not None:
            # Restrict conversion to datasets related to the given visits.
            subset = ConversionSubset(instrument=self.instrument.getName(), visits=frozenset(visits))
        else:
            if self.config.relatedOnly:
                self.log.warn("config.relatedOnly is True but all visits are being ingested; "
                              "no filtering will be done.")
            subset = None

        if self.config.doRegisterInstrument:
            # Add the Instrument, its filters, and its detectors to the Gen3
            # registry before any datasets that reference them are ingested.
            self.instrument.register(self.registry)

        # Make converters for the root repository and any calib/rerun repos.
        converters = []
        rootConverter = RootRepoConverter(task=self, root=root, collections=collections, subset=subset)
        converters.append(rootConverter)
        for root, collections in calibs.items():
            if not os.path.isabs(root):
                root = os.path.join(rootConverter.root, root)
            converter = CalibRepoConverter(task=self, root=root, collections=collections,
                                           mapper=rootConverter.mapper,
                                           subset=rootConverter.subset)
            converters.append(converter)
        for root, collections in reruns.items():
            if not os.path.isabs(root):
                root = os.path.join(rootConverter.root, root)
            converter = StandardRepoConverter(task=self, root=root, collections=collections,
                                              subset=rootConverter.subset)
            converters.append(converter)

        # Run the conversion steps in order across all repositories: first
        # insert dimension records, then locate datasets, then expand their
        # data IDs.
        for converter in converters:
            converter.insertDimensionData()

        for converter in converters:
            converter.findDatasets()

        for converter in converters:
            converter.expandDataIds()

        for converter in converters:
            converter.ingest()
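

# Illustrative end-to-end sketch (paths, collection names, visit IDs, and the
# instrument config value below are all hypothetical; the Gen3 repository is
# assumed to already exist at "gen3repo"):
#
#     from lsst.daf.butler import Butler
#
#     butler3 = Butler("gen3repo", run="shared/example")
#     config = ConvertRepoTask.ConfigClass()
#     config.instrument = "lsst.obs.example.ExampleInstrument"
#     task = ConvertRepoTask(config=config, butler3=butler3)
#     task.run(
#         root="/data/gen2root",
#         collections=["shared/example"],
#         calibs={"CALIB": ["calib/example"]},
#         reruns={"rerun/example": ["rerun/example"]},
#         visits=[903334, 903336],
#     )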