Coverage for python / lsst / daf / butler / _rubin / transfer_datasets_in_place.py: 43%
21 statements
« prev ^ index » next coverage.py v7.13.5, created at 2026-04-17 08:49 +0000
1# This file is part of daf_butler.
2#
3# Developed for the LSST Data Management System.
4# This product includes software developed by the LSST Project
5# (http://www.lsst.org).
6# See the COPYRIGHT file at the top-level directory of this distribution
7# for details of code ownership.
8#
9# This software is dual licensed under the GNU General Public License and also
10# under a 3-clause BSD license. Recipients may choose which of these licenses
11# to use; please see the files gpl-3.0.txt and/or bsd_license.txt,
12# respectively. If you choose the GPL option then the following text applies
13# (but note that there is still no warranty even if you opt for BSD instead):
14#
15# This program is free software: you can redistribute it and/or modify
16# it under the terms of the GNU General Public License as published by
17# the Free Software Foundation, either version 3 of the License, or
18# (at your option) any later version.
19#
20# This program is distributed in the hope that it will be useful,
21# but WITHOUT ANY WARRANTY; without even the implied warranty of
22# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
23# GNU General Public License for more details.
24#
25# You should have received a copy of the GNU General Public License
26# along with this program. If not, see <http://www.gnu.org/licenses/>.
28from __future__ import annotations
30__all__ = ("transfer_datasets_in_place",)
32from collections.abc import Iterable
33from logging import DEBUG
35from .._butler import Butler
36from .._dataset_ref import DatasetRef
37from ..direct_butler import DirectButler
38from ..progress import Progress
def transfer_datasets_in_place(
    source_butler: Butler, target_butler: Butler, refs: Iterable[DatasetRef]
) -> list[DatasetRef]:
    """Transfer registry and datastore records from one Butler repository to
    another, with the target datastore sharing artifact files at their
    original location in the source datastore.

    Parameters
    ----------
    source_butler : `Butler`
        `DirectButler` instance from which datasets will be copied.
    target_butler : `Butler`
        `DirectButler` instance to which datasets will be copied.
    refs : `~collections.abc.Iterable` [ `DatasetRef` ]
        Datasets to be copied.

    Returns
    -------
    transferred_datasets : `list` [ `DatasetRef` ]
        Datasets which were actually transferred.  (Excludes those that were
        already present in the target repository.)

    Notes
    -----
    This assumes that ``target_butler`` has a datastore matching the name and
    type of each datastore configured in ``source_butler``. ``target_butler``
    should only have read-only access to the datastore root shared with
    ``source_butler`` -- otherwise, it could accidentally delete files owned by
    ``source_butler``.

    Any run collections or dataset types not present in the target repository
    will be copied from the source repository.

    This operation is idempotent -- any of the given ``refs`` that already
    exist in the target repository will be skipped.
    """
    # Only DirectButler exposes the private registry/datastore hooks used
    # below (_datastore, _exists_many, _prepare_for_import_refs, ...).
    assert isinstance(source_butler, DirectButler)
    assert isinstance(target_butler, DirectButler)
    if not set(target_butler._datastore.names).issuperset(source_butler._datastore.names):
        # If this constraint was not satisfied, then
        # Datastore.import_records() would silently drop data for any missing
        # datastores in the target.
        raise AssertionError(
            "Datastore configuration differs between transfer_datasets_in_place() repositories."
            f" Source: {source_butler._datastore.names} Target: {target_butler._datastore.names}"
        )

    # Idempotence: determine which of the given refs are not yet present in
    # the target, and transfer only those.
    target_existence = target_butler._exists_many(refs, full_check=False)
    new_datasets = [ref for ref, exists in target_existence.items() if not exists]
    if not new_datasets:
        # Nothing to transfer -- skip opening a write transaction and running
        # the registration/import machinery for an empty batch.
        return []
    import_info = target_butler._prepare_for_import_refs(
        source_butler, new_datasets, register_dataset_types=True, transfer_dimensions=False
    )
    # Export datastore records that still point at the original artifact
    # locations, so the target shares the files in place instead of copying.
    datastore_export = source_butler._datastore.export_records(new_datasets)
    with target_butler.transaction():
        progress = Progress(__name__, level=DEBUG)
        target_butler._import_grouped_refs(
            import_info.grouped_refs, source_butler, progress, expand_refs=False
        )
        target_butler._datastore.import_records(datastore_export)

    return new_datasets