from __future__ import annotations
__all__ = ["ConvertRepoConfig", "ConvertRepoTask", "ConvertRepoSkyMapConfig"]
import fnmatch
import os

from dataclasses import dataclass
from typing import Iterable, Optional, List, Dict

from lsst.utils import doImport
from lsst.daf.butler import Butler as Butler3, SkyPixDimension
from lsst.pex.config import Config, ConfigurableField, ConfigDictField, DictField, ListField, Field
from lsst.pipe.base import Task
from lsst.skymap import skyMapRegistry, BaseSkyMap

from ..ingest import RawIngestTask
from .repoConverter import ConversionSubset
from .rootRepoConverter import RootRepoConverter
from .calibRepoConverter import CalibRepoConverter
from .standardRepoConverter import StandardRepoConverter
48 """Struct containing information about a skymap that may appear in a Gen2 53 """Name of the skymap used in Gen3 data IDs. 57 """Hash computed by `BaseSkyMap.getSha1`. 61 """Name of the skymap used in Gen3 data IDs. 65 """Whether this skymap has been found in at least one repository being 71 """Sub-config used to hold the parameters of a SkyMap. 75 This config only needs to exist because we can't put a 76 `~lsst.pex.config.RegistryField` directly inside a 77 `~lsst.pex.config.ConfigDictField`. 79 It needs to have its only field named "skyMap" for compatibility with the 80 configuration of `lsst.pipe.tasks.MakeSkyMapTask`, which we want so we can 81 use one config file in an obs package to configure both. 83 This name leads to unfortunate repetition with the field named 84 "skymap" that holds it - "skyMap[name].skyMap" - but that seems 87 skyMap = skyMapRegistry.makeField(
88 doc=
"Type and parameters for the SkyMap itself.",


class ConvertRepoConfig(Config):
    raws = ConfigurableField(
        "Configuration for subtask responsible for ingesting raws and adding "
        "visit and exposure dimension entries.",
        target=RawIngestTask,
    )
    skyMaps = ConfigDictField(
        "Mapping from Gen3 skymap name to the parameters used to construct a "
        "BaseSkyMap instance. This will be used to associate names with "
        "existing skymaps found in the Gen2 repo.",
        keytype=str,
        itemtype=ConvertRepoSkyMapConfig,
        default={},
    )
    rootSkyMapName = Field(
        "Name of a Gen3 skymap (an entry in ``self.skyMaps``) to assume for "
        "datasets in the root repository when no SkyMap is found there.",
        dtype=str,
        optional=True,
        default=None,
    )
    collections = DictField(
        "Special collections (values) for certain dataset types (keys). "
        "These are used in addition to rerun collections for datasets in "
        "reruns. The 'raw' dataset must have an entry here if it is to be "
        "ingested.",
        keytype=str,
        itemtype=str,
        default={
            "deepCoadd_skyMap": "skymaps",
            "brightObjectMask": "masks",
        },
    )
    storageClasses = DictField(
        "Mapping from dataset type name or Gen2 policy entry (e.g. 'python' "
        "or 'persistable') to the Gen3 StorageClass name.",
        keytype=str,
        itemtype=str,
        default={
            "BaseSkyMap": "SkyMap",
            "BaseCatalog": "Catalog",
            "BackgroundList": "Background",
            "MultilevelParquetTable": "DataFrame",
            "ParquetTable": "DataFrame",
        },
    )
    doRegisterInstrument = Field(
        "If True (default), add dimension records for the Instrument and its "
        "filters and detectors to the registry instead of assuming they are "
        "already present.",
        dtype=bool,
        default=True,
    )
    doWriteCuratedCalibrations = Field(
149 "If True (default), ingest human-curated calibrations directly via " 150 "the Instrument interface. Note that these calibrations are never " 151 "converted from Gen2 repositories.",
156 "The names of reference catalogs (subdirectories under ref_cats) to " 161 fileIgnorePatterns = ListField(
162 "Filename globs that should be ignored instead of being treated as " 165 default=[
"README.txt",
"*~?",
"butler.yaml",
"gen3.sqlite3",
166 "registry.sqlite3",
"calibRegistry.sqlite3",
"_mapper",
167 "_parent",
"repositoryCfg.yaml"]
    datasetIncludePatterns = ListField(
        "Glob-style patterns for dataset type names that should be converted.",
        dtype=str,
        default=["*"],
    )
    datasetIgnorePatterns = ListField(
        "Glob-style patterns for dataset type names that should not be "
        "converted despite matching a pattern in datasetIncludePatterns.",
        dtype=str,
        default=[],
    )
181 "Key used for the Gen2 equivalent of 'detector' in data IDs.",
186 "If True (default), only convert datasets that are related to the " 187 "ingested visits. Ignored unless a list of visits is passed to " 195 return self.
raws.transfer
    @transfer.setter
    def transfer(self, value):
        self.raws.transfer = value
    @property
    def instrument(self):
        return self.raws.instrument
    @instrument.setter
    def instrument(self, value):
        self.raws.instrument = value
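    # Usage sketch: the ``transfer`` and ``instrument`` properties just forward
    # to the ``raws`` subtask config, so the two assignments below are
    # equivalent ("copy" is only an example transfer mode).
    #
    #     config.transfer = "copy"
    #     config.raws.transfer = "copy"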
217 """A task that converts one or more related Gen2 data repositories to a 218 single Gen3 data repository (with multiple collections). 222 config: `ConvertRepoConfig` 223 Configuration for this task. 224 butler3: `lsst.daf.butler.Butler` 225 Gen3 Butler instance that represents the data repository datasets will 226 be ingested into. The collection and/or run associated with this 227 Butler will be ignored in favor of collections/runs passed via config 230 Other keyword arguments are forwarded to the `Task` constructor. 234 Most of the work of converting repositories is delegated to instances of 235 the `RepoConverter` hierarchy. The `ConvertRepoTask` instance itself holds 236 only state that is relevant for all Gen2 repositories being ingested, while 237 each `RepoConverter` instance holds only state relevant for the conversion 238 of a single Gen2 repository. Both the task and the `RepoConverter` 239 instances are single use; `ConvertRepoTask.run` and most `RepoConverter` 240 methods may only be called once on a particular instance. 243 ConfigClass = ConvertRepoConfig
    _DefaultName = "convertRepo"

    def __init__(self, config=None, *, butler3: Butler3, **kwds):
        super().__init__(config=config, **kwds)
        self.butler3 = butler3
        self.registry = butler3.registry
        self.makeSubtask("raws", butler=butler3)
        self.instrument = doImport(self.config.instrument)()
        # Bookkeeping for skymaps and skypix dimensions seen in the Gen2 repos.
        self._configuredSkyMapsBySha1 = {}
        self._configuredSkyMapsByName = {}
        self._usedSkyPix = set()
        for name, config in self.config.skyMaps.items():
            instance = config.skyMap.apply()
            struct = ConfiguredSkyMap(name=name, sha1=instance.getSha1(), instance=instance)
            self._configuredSkyMapsBySha1[struct.sha1] = struct
            self._configuredSkyMapsByName[struct.name] = struct
268 """Return `True` if configuration indicates that the given dataset type 271 This method is intended to be called primarily by the 272 `RepoConverter` instances used interally by the task. 277 Name of the dataset type. 282 Whether the dataset should be included in the conversion. 285 any(fnmatch.fnmatchcase(datasetTypeName, pattern)
286 for pattern
in self.config.datasetIncludePatterns)
287 and not any(fnmatch.fnmatchcase(datasetTypeName, pattern)
288 for pattern
in self.config.datasetIgnorePatterns)
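    # Illustration with hypothetical patterns (not the defaults):
    #
    #     config.datasetIncludePatterns = ["deepCoadd*"]
    #     config.datasetIgnorePatterns = ["deepCoadd_skyMap"]
    #     task.isDatasetTypeIncluded("deepCoadd_calexp")   # True
    #     task.isDatasetTypeIncluded("deepCoadd_skyMap")   # False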
292 """Indicate that a repository uses the given SkyMap. 294 This method is intended to be called primarily by the 295 `RepoConverter` instances used interally by the task. 299 skyMap : `lsst.skymap.BaseSkyMap` 300 SkyMap instance being used, typically retrieved from a Gen2 306 The name of the skymap in Gen3 data IDs. 308 sha1 = skyMap.getSha1()
311 except KeyError
as err:
312 raise LookupError(f
"SkyMap with sha1={sha1} not included in configuration.")
from err
317 """Register all skymaps that have been marked as used. 319 This method is intended to be called primarily by the 320 `RepoConverter` instances used interally by the task. 324 subset : `ConversionSubset`, optional 325 Object that will be used to filter converted datasets by data ID. 326 If given, it will be updated with the tracts of this skymap that 327 overlap the visits in the subset. 331 struct.instance.register(struct.name, self.
registry)
332 if subset
is not None and self.config.relatedOnly:
333 subset.addSkyMap(self.
registry, struct.name)
336 """Indicate that a repository uses the given SkyPix dimension. 338 This method is intended to be called primarily by the 339 `RepoConverter` instances used interally by the task. 343 dimension : `lsst.daf.butler.SkyPixDimension` 344 Dimension represening a pixelization of the sky. 349 """Register all skymaps that have been marked as used. 351 This method is intended to be called primarily by the 352 `RepoConverter` instances used interally by the task. 356 subset : `ConversionSubset`, optional 357 Object that will be used to filter converted datasets by data ID. 358 If given, it will be updated with the pixelization IDs that 359 overlap the visits in the subset. 361 if subset
is not None and self.config.relatedOnly:
363 subset.addSkyPix(self.
registry, dimension)

    def run(self, root: str, collections: List[str], *,
            calibs: Optional[Dict[str, List[str]]] = None,
            reruns: Optional[Dict[str, List[str]]] = None,
            visits: Optional[Iterable[int]] = None):
369 """Convert a group of related data repositories. 374 Complete path to the root Gen2 data repository. This should be 375 a data repository that includes a Gen2 registry and any raw files 376 and/or reference catalogs. 377 collections : `list` of `str` 378 Gen3 collections that datasets from the root repository should be 379 associated with. This should include any rerun collection that 380 these datasets should also be considered to be part of; because of 381 structural difference between Gen2 parent/child relationships and 382 Gen3 collections, these cannot be reliably inferred. 384 Dictionary mapping calibration repository path to the collections 385 that the repository's datasets should be associated with. The path 386 may be relative to ``root`` or absolute. Collections should 387 include child repository collections as appropriate (see 388 documentation for ``collections``). 390 Dictionary mapping rerun repository path to the collections that 391 the repository's datasets should be associated with. The path may 392 be relative to ``root`` or absolute. Collections should include 393 child repository collections as appropriate (see documentation for 395 visits : iterable of `int`, optional 396 The integer IDs of visits to convert. If not provided, all visits 397 in the Gen2 root repository will be converted. 404 if visits
is not None:
407 if self.config.relatedOnly:
408 self.log.warn(
"config.relatedOnly is True but all visits are being ingested; " 409 "no filtering will be done.")
        if self.config.doRegisterInstrument:
            self.instrument.register(self.registry)
        # Make converters for all Gen2 repos.
        converters = []
        rootConverter = RootRepoConverter(task=self, root=root, collections=collections, subset=subset)
        converters.append(rootConverter)
        for root, collections in calibs.items():
            if not os.path.isabs(root):
                root = os.path.join(rootConverter.root, root)
            converter = CalibRepoConverter(task=self, root=root, collections=collections,
                                           mapper=rootConverter.mapper,
                                           subset=rootConverter.subset)
            converters.append(converter)
        for root, collections in reruns.items():
            if not os.path.isabs(root):
                root = os.path.join(rootConverter.root, root)
            converter = StandardRepoConverter(task=self, root=root, collections=collections,
                                              subset=rootConverter.subset)
            converters.append(converter)
        for converter in converters:
            converter.insertDimensionData()
        for converter in converters:
            converter.findDatasets()
        for converter in converters:
            converter.expandDataIds()
        for converter in converters:
            converter.ingest()
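
# A minimal usage sketch (paths, collection names, and the instrument class
# path are assumptions for illustration, not values defined in this module):
#
#     from lsst.daf.butler import Butler
#
#     butler3 = Butler("/repo/gen3", run="shared/example")
#     config = ConvertRepoTask.ConfigClass()
#     config.instrument = "lsst.obs.subaru.HyperSuprimeCam"
#     task = ConvertRepoTask(config=config, butler3=butler3)
#     task.run("/repo/gen2", collections=["shared/example"],
#              reruns={"rerun/my-rerun": ["u/someone/my-rerun"]},
#              visits=[903334, 903336])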