Coverage for python/lsst/daf/butler/datastores/fileDatastore.py: 8%

975 statements  

coverage.py v7.2.7, created at 2023-06-07 02:10 -0700

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21from __future__ import annotations 

22 

23"""Generic file-based datastore code.""" 

24 

25__all__ = ("FileDatastore",) 

26 

27import hashlib 

28import logging 

29from collections import defaultdict 

30from collections.abc import Callable 

31from dataclasses import dataclass 

32from typing import ( 

33 TYPE_CHECKING, 

34 Any, 

35 ClassVar, 

36 Dict, 

37 Iterable, 

38 List, 

39 Mapping, 

40 Optional, 

41 Sequence, 

42 Set, 

43 Tuple, 

44 Type, 

45 Union, 

46) 

47 

48from lsst.daf.butler import ( 

49 CompositesMap, 

50 Config, 

51 DatasetId, 

52 DatasetRef, 

53 DatasetRefURIs, 

54 DatasetType, 

55 DatasetTypeNotSupportedError, 

56 Datastore, 

57 DatastoreCacheManager, 

58 DatastoreConfig, 

59 DatastoreDisabledCacheManager, 

60 DatastoreRecordData, 

61 DatastoreValidationError, 

62 FileDataset, 

63 FileDescriptor, 

64 FileTemplates, 

65 FileTemplateValidationError, 

66 Formatter, 

67 FormatterFactory, 

68 Location, 

69 LocationFactory, 

70 Progress, 

71 StorageClass, 

72 StoredDatastoreItemInfo, 

73 StoredFileInfo, 

74 ddl, 

75) 

76from lsst.daf.butler.core.repoRelocation import replaceRoot 

77from lsst.daf.butler.core.utils import transactional 

78from lsst.daf.butler.registry.interfaces import DatastoreRegistryBridge, ReadOnlyDatabaseError 

79from lsst.resources import ResourcePath, ResourcePathExpression 

80from lsst.utils.introspection import get_class_of, get_instance_of 

81from lsst.utils.iteration import chunk_iterable 

82 

83# For VERBOSE logging usage. 

84from lsst.utils.logging import VERBOSE, getLogger 

85from lsst.utils.timer import time_this 

86from sqlalchemy import BigInteger, String 

87 

88from ..registry.interfaces import FakeDatasetRef 

89from .genericDatastore import GenericBaseDatastore 

90 

91if TYPE_CHECKING: 

92 from lsst.daf.butler import AbstractDatastoreCacheManager, LookupKey 

93 from lsst.daf.butler.registry.interfaces import DatasetIdRef, DatastoreRegistryBridgeManager 

94 

95log = getLogger(__name__) 

96 

97 

98class _IngestPrepData(Datastore.IngestPrepData): 

99 """Helper class for FileDatastore ingest implementation. 

100 

101 Parameters 

102 ---------- 

103 datasets : `list` of `FileDataset` 

104 Files to be ingested by this datastore. 

105 """ 

106 

107 def __init__(self, datasets: List[FileDataset]): 

108 super().__init__(ref for dataset in datasets for ref in dataset.refs) 

109 self.datasets = datasets 

110 

111 

112@dataclass(frozen=True) 

113class DatastoreFileGetInformation: 

114 """Collection of useful parameters needed to retrieve a file from 

115 a Datastore. 

116 """ 

117 

118 location: Location 

119 """The location from which to read the dataset.""" 

120 

121 formatter: Formatter 

122 """The `Formatter` to use to deserialize the dataset.""" 

123 

124 info: StoredFileInfo 

125 """Stored information about this file and its formatter.""" 

126 

127 assemblerParams: Mapping[str, Any] 

128 """Parameters to use for post-processing the retrieved dataset.""" 

129 

130 formatterParams: Mapping[str, Any] 

131 """Parameters that were understood by the associated formatter.""" 

132 

133 component: Optional[str] 

134 """The component to be retrieved (can be `None`).""" 

135 

136 readStorageClass: StorageClass 

137 """The `StorageClass` of the dataset being read.""" 

138 

139 

140class FileDatastore(GenericBaseDatastore): 

141 """Generic Datastore for file-based implementations. 

142 

143 Should always be sub-classed since key abstract methods are missing. 

144 

145 Parameters 

146 ---------- 

147 config : `DatastoreConfig` or `str` 

148 Configuration as either a `Config` object or URI to file. 

149 bridgeManager : `DatastoreRegistryBridgeManager` 

150 Object that manages the interface between `Registry` and datastores. 

151 butlerRoot : `str`, optional 

152 New datastore root to use to override the configuration value. 

153 

154 Raises 

155 ------ 

156 ValueError 

157 If root location does not exist and ``create`` is `False` in the 

158 configuration. 

159 """ 

160 

161 defaultConfigFile: ClassVar[Optional[str]] = None 

162 """Path to configuration defaults. Accessed within the ``config`` resource 

163 or relative to a search path. Can be None if no defaults specified. 

164 """ 

165 

166 root: ResourcePath 

167 """Root directory URI of this `Datastore`.""" 

168 

169 locationFactory: LocationFactory 

170 """Factory for creating locations relative to the datastore root.""" 

171 

172 formatterFactory: FormatterFactory 

173 """Factory for creating instances of formatters.""" 

174 

175 templates: FileTemplates 

176 """File templates that can be used by this `Datastore`.""" 

177 

178 composites: CompositesMap 

179 """Determines whether a dataset should be disassembled on put.""" 

180 

181 defaultConfigFile = "datastores/fileDatastore.yaml" 

182 """Path to configuration defaults. Accessed within the ``config`` resource 

183 or relative to a search path. Can be None if no defaults specified. 

184 """ 

185 

186 _retrieve_dataset_method: Callable[[str], DatasetType | None] | None = None 

187 """Callable that is used in trusted mode to retrieve registry definition 

188 of a named dataset type. 

189 """ 

190 

191 @classmethod 

192 def setConfigRoot(cls, root: str, config: Config, full: Config, overwrite: bool = True) -> None: 

193 """Set any filesystem-dependent config options for this Datastore to 

194 be appropriate for a new empty repository with the given root. 

195 

196 Parameters 

197 ---------- 

198 root : `str` 

199 URI to the root of the data repository. 

200 config : `Config` 

201 A `Config` to update. Only the subset understood by 

202 this component will be updated. Will not expand 

203 defaults. 

204 full : `Config` 

205 A complete config with all defaults expanded that can be 

206 converted to a `DatastoreConfig`. Read-only and will not be 

207 modified by this method. 

208 Repository-specific options that should not be obtained 

209 from defaults when Butler instances are constructed 

210 should be copied from ``full`` to ``config``. 

211 overwrite : `bool`, optional 

212 If `False`, do not modify a value in ``config`` if the value 

213 already exists. Default is always to overwrite with the provided 

214 ``root``. 

215 

216 Notes 

217 ----- 

218 If a keyword is explicitly defined in the supplied ``config`` it 

219 will not be overridden by this method if ``overwrite`` is `False`. 

220 This allows explicit values set in external configs to be retained. 

221 """ 

222 Config.updateParameters( 

223 DatastoreConfig, 

224 config, 

225 full, 

226 toUpdate={"root": root}, 

227 toCopy=("cls", ("records", "table")), 

228 overwrite=overwrite, 

229 ) 

230 

231 @classmethod 

232 def makeTableSpec(cls, datasetIdColumnType: type) -> ddl.TableSpec: 

233 return ddl.TableSpec( 

234 fields=[ 

235 ddl.FieldSpec(name="dataset_id", dtype=datasetIdColumnType, primaryKey=True), 

236 ddl.FieldSpec(name="path", dtype=String, length=256, nullable=False), 

237 ddl.FieldSpec(name="formatter", dtype=String, length=128, nullable=False), 

238 ddl.FieldSpec(name="storage_class", dtype=String, length=64, nullable=False), 

239 # Use empty string to indicate no component 

240 ddl.FieldSpec(name="component", dtype=String, length=32, primaryKey=True), 

241 # TODO: should checksum be Base64Bytes instead? 

242 ddl.FieldSpec(name="checksum", dtype=String, length=128, nullable=True), 

243 ddl.FieldSpec(name="file_size", dtype=BigInteger, nullable=True), 

244 ], 

245 unique=frozenset(), 

246 indexes=[ddl.IndexSpec("path")], 

247 ) 

248 
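For orientation, a hypothetical row in the opaque records table defined by ``makeTableSpec`` could look like the sketch below. All values are illustrative only; the ``dataset_id`` type comes from the registry's ``datasetIdColumnType`` and the formatter class name here is made up.

# Illustrative record only -- field names match the TableSpec above,
# values and the formatter class are invented for this sketch.
example_record = {
    "dataset_id": "6f6ee1a0-0000-4000-8000-000000000000",
    "path": "run1/rawDataset/rawDataset_001.fits",  # relative to the datastore root
    "formatter": "mypackage.formatters.MyFitsFormatter",  # hypothetical class
    "storage_class": "Exposure",
    "component": "",          # empty string means "no component"
    "checksum": None,         # only populated when checksums are enabled
    "file_size": 16777216,
}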

249 def __init__( 

250 self, 

251 config: Union[DatastoreConfig, str], 

252 bridgeManager: DatastoreRegistryBridgeManager, 

253 butlerRoot: str | None = None, 

254 ): 

255 super().__init__(config, bridgeManager) 

256 if "root" not in self.config: 

257 raise ValueError("No root directory specified in configuration") 

258 

259 self._bridgeManager = bridgeManager 

260 

261 # Name ourselves either using an explicit name or a name 

262 # derived from the (unexpanded) root 

263 if "name" in self.config: 

264 self.name = self.config["name"] 

265 else: 

266 # We use the unexpanded root in the name to indicate that this 

267 # datastore can be moved without having to update registry. 

268 self.name = "{}@{}".format(type(self).__name__, self.config["root"]) 

269 

270 # Support repository relocation in config 

271 # Existence of self.root is checked in subclass 

272 self.root = ResourcePath( 

273 replaceRoot(self.config["root"], butlerRoot), forceDirectory=True, forceAbsolute=True 

274 ) 

275 

276 self.locationFactory = LocationFactory(self.root) 

277 self.formatterFactory = FormatterFactory() 

278 

279 # Now associate formatters with storage classes 

280 self.formatterFactory.registerFormatters(self.config["formatters"], universe=bridgeManager.universe) 

281 

282 # Read the file naming templates 

283 self.templates = FileTemplates(self.config["templates"], universe=bridgeManager.universe) 

284 

285 # See if composites should be disassembled 

286 self.composites = CompositesMap(self.config["composites"], universe=bridgeManager.universe) 

287 

288 tableName = self.config["records", "table"] 

289 try: 

290 # Storage of paths and formatters, keyed by dataset_id 

291 self._table = bridgeManager.opaque.register( 

292 tableName, self.makeTableSpec(bridgeManager.datasetIdColumnType) 

293 ) 

294 # Interface to Registry. 

295 self._bridge = bridgeManager.register(self.name) 

296 except ReadOnlyDatabaseError: 

297 # If the database is read only and we just tried and failed to 

298 # create a table, it means someone is trying to create a read-only 

299 # butler client for an empty repo. That should be okay, as long 

300 # as they then try to get any datasets before some other client 

301 creates the table. Chances are they're just validating 

302 # configuration. 

303 pass 

304 

305 # Determine whether checksums should be used - default to False 

306 self.useChecksum = self.config.get("checksum", False) 

307 

308 # Determine whether we can fall back to configuration if a 

309 # requested dataset is not known to registry 

310 self.trustGetRequest = self.config.get("trust_get_request", False) 

311 

312 # Create a cache manager 

313 self.cacheManager: AbstractDatastoreCacheManager 

314 if "cached" in self.config: 

315 self.cacheManager = DatastoreCacheManager(self.config["cached"], universe=bridgeManager.universe) 

316 else: 

317 self.cacheManager = DatastoreDisabledCacheManager("", universe=bridgeManager.universe) 

318 

319 # Check existence and create directory structure if necessary 

320 if not self.root.exists(): 

321 if "create" not in self.config or not self.config["create"]: 

322 raise ValueError(f"No valid root and not allowed to create one at: {self.root}") 

323 try: 

324 self.root.mkdir() 

325 except Exception as e: 

326 raise ValueError( 

327 f"Can not create datastore root '{self.root}', check permissions. Got error: {e}" 

328 ) from e 

329 
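The constructor above reads a handful of configuration keys. As a hedged sketch (key names follow the lookups in the code above; the values shown are illustrative, not defaults from the packaged YAML):

# Illustrative subset of the datastore configuration consumed by __init__.
example_config = {
    "root": "<butlerRoot>/datastore",   # required; replaceRoot() expands the butler-root placeholder
    "create": True,                     # allow mkdir of the root if it does not exist
    "records": {"table": "file_datastore_records"},  # opaque table name (illustrative)
    "checksum": False,                  # whether to record checksums on write/ingest
    "trust_get_request": False,         # fall back to config when registry has no record
    # "name" is optional; "formatters", "templates", "composites" and an
    # optional "cached" section are handed to their respective helper classes.
}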

330 def __str__(self) -> str: 

331 return str(self.root) 

332 

333 @property 

334 def bridge(self) -> DatastoreRegistryBridge: 

335 return self._bridge 

336 

337 def _artifact_exists(self, location: Location) -> bool: 

338 """Check that an artifact exists in this datastore at the specified 

339 location. 

340 

341 Parameters 

342 ---------- 

343 location : `Location` 

344 Expected location of the artifact associated with this datastore. 

345 

346 Returns 

347 ------- 

348 exists : `bool` 

349 `True` if the location can be found, `False` otherwise. 

350 """ 

351 log.debug("Checking if resource exists: %s", location.uri) 

352 return location.uri.exists() 

353 

354 def _delete_artifact(self, location: Location) -> None: 

355 """Delete the artifact from the datastore. 

356 

357 Parameters 

358 ---------- 

359 location : `Location` 

360 Location of the artifact associated with this datastore. 

361 """ 

362 if location.pathInStore.isabs(): 

363 raise RuntimeError(f"Cannot delete artifact with absolute uri {location.uri}.") 

364 

365 try: 

366 location.uri.remove() 

367 except FileNotFoundError: 

368 log.debug("File %s did not exist and so could not be deleted.", location.uri) 

369 raise 

370 except Exception as e: 

371 log.critical("Failed to delete file: %s (%s)", location.uri, e) 

372 raise 

373 log.debug("Successfully deleted file: %s", location.uri) 

374 

375 def addStoredItemInfo(self, refs: Iterable[DatasetRef], infos: Iterable[StoredFileInfo]) -> None: 

376 # Docstring inherited from GenericBaseDatastore 

377 records = [info.rebase(ref).to_record() for ref, info in zip(refs, infos)] 

378 self._table.insert(*records, transaction=self._transaction) 

379 

380 def getStoredItemsInfo(self, ref: DatasetIdRef) -> List[StoredFileInfo]: 

381 # Docstring inherited from GenericBaseDatastore 

382 

383 # Look for the dataset_id -- there might be multiple matches 

384 # if we have disassembled the dataset. 

385 records = self._table.fetch(dataset_id=ref.id) 

386 return [StoredFileInfo.from_record(record) for record in records] 

387 

388 def _get_stored_records_associated_with_refs( 

389 self, refs: Iterable[DatasetIdRef] 

390 ) -> Dict[DatasetId, List[StoredFileInfo]]: 

391 """Retrieve all records associated with the provided refs. 

392 

393 Parameters 

394 ---------- 

395 refs : iterable of `DatasetIdRef` 

396 The refs for which records are to be retrieved. 

397 

398 Returns 

399 ------- 

400 records : `dict` of [`DatasetId`, `list` of `StoredFileInfo`] 

401 The matching records indexed by the ref ID. The number of entries 

402 in the dict can be smaller than the number of requested refs. 

403 """ 

404 records = self._table.fetch(dataset_id=[ref.id for ref in refs]) 

405 

406 # Uniqueness is dataset_id + component so can have multiple records 

407 # per ref. 

408 records_by_ref = defaultdict(list) 

409 for record in records: 

410 records_by_ref[record["dataset_id"]].append(StoredFileInfo.from_record(record)) 

411 return records_by_ref 

412 

413 def _refs_associated_with_artifacts( 

414 self, paths: List[Union[str, ResourcePath]] 

415 ) -> Dict[str, Set[DatasetId]]: 

416 """Return paths and associated dataset refs. 

417 

418 Parameters 

419 ---------- 

420 paths : `list` of `str` or `lsst.resources.ResourcePath` 

421 All the paths to include in search. 

422 

423 Returns 

424 ------- 

425 mapping : `dict` of [`str`, `set` [`DatasetId`]] 

426 Mapping of each path to a set of associated database IDs. 

427 """ 

428 records = self._table.fetch(path=[str(path) for path in paths]) 

429 result = defaultdict(set) 

430 for row in records: 

431 result[row["path"]].add(row["dataset_id"]) 

432 return result 

433 

434 def _registered_refs_per_artifact(self, pathInStore: ResourcePath) -> Set[DatasetId]: 

435 """Return all dataset refs associated with the supplied path. 

436 

437 Parameters 

438 ---------- 

439 pathInStore : `lsst.resources.ResourcePath` 

440 Path of interest in the data store. 

441 

442 Returns 

443 ------- 

444 ids : `set` [`DatasetId`] 

445 All dataset IDs associated with this path. 

446 """ 

447 records = list(self._table.fetch(path=str(pathInStore))) 

448 ids = {r["dataset_id"] for r in records} 

449 return ids 

450 

451 def removeStoredItemInfo(self, ref: DatasetIdRef) -> None: 

452 # Docstring inherited from GenericBaseDatastore 

453 self._table.delete(["dataset_id"], {"dataset_id": ref.id}) 

454 

455 def _get_dataset_locations_info(self, ref: DatasetIdRef) -> List[Tuple[Location, StoredFileInfo]]: 

456 r"""Find all the `Location`\ s of the requested dataset in the 

457 `Datastore` and the associated stored file information. 

458 

459 Parameters 

460 ---------- 

461 ref : `DatasetRef` 

462 Reference to the required `Dataset`. 

463 

464 Returns 

465 ------- 

466 results : `list` [`tuple` [`Location`, `StoredFileInfo` ]] 

467 Location of the dataset within the datastore and 

468 stored information about each file and its formatter. 

469 """ 

470 # Get the file information (this will fail if no file) 

471 records = self.getStoredItemsInfo(ref) 

472 

473 # Use the path to determine the location -- we need to take 

474 # into account absolute URIs in the datastore record 

475 return [(r.file_location(self.locationFactory), r) for r in records] 

476 

477 def _can_remove_dataset_artifact(self, ref: DatasetIdRef, location: Location) -> bool: 

478 """Check that there is only one dataset associated with the 

479 specified artifact. 

480 

481 Parameters 

482 ---------- 

483 ref : `DatasetRef` or `FakeDatasetRef` 

484 Dataset to be removed. 

485 location : `Location` 

486 The location of the artifact to be removed. 

487 

488 Returns 

489 ------- 

490 can_remove : `bool` 

491 `True` if the artifact can be safely removed. 

492 """ 

493 # Can't ever delete absolute URIs. 

494 if location.pathInStore.isabs(): 

495 return False 

496 

497 # Get all entries associated with this path 

498 allRefs = self._registered_refs_per_artifact(location.pathInStore) 

499 if not allRefs: 

500 raise RuntimeError(f"Datastore inconsistency error. {location.pathInStore} not in registry") 

501 

502 # Remove these refs from all the refs and if there is nothing left 

503 # then we can delete 

504 remainingRefs = allRefs - {ref.id} 

505 

506 if remainingRefs: 

507 return False 

508 return True 

509 

510 def _get_expected_dataset_locations_info(self, ref: DatasetRef) -> List[Tuple[Location, StoredFileInfo]]: 

511 """Predict the location and related file information of the requested 

512 dataset in this datastore. 

513 

514 Parameters 

515 ---------- 

516 ref : `DatasetRef` 

517 Reference to the required `Dataset`. 

518 

519 Returns 

520 ------- 

521 results : `list` [`tuple` [`Location`, `StoredFileInfo` ]] 

522 Expected Location of the dataset within the datastore and 

523 placeholder information about each file and its formatter. 

524 

525 Notes 

526 ----- 

527 Uses the current configuration to determine how we would expect the 

528 datastore files to have been written if we couldn't ask registry. 

529 This is safe so long as there has been no change to datastore 

530 configuration between writing the dataset and wanting to read it. 

531 Will not work for files that have been ingested without using the 

532 standard file template or default formatter. 

533 """ 

534 

535 # If we have a component ref we always need to ask the questions 

536 # of the composite. If the composite is disassembled this routine 

537 # should return all components. If the composite was not 

538 # disassembled the composite is what is stored regardless of 

539 # component request. Note that if the caller has disassembled 

540 # a composite there is no way for this guess to know that 

541 # without trying both the composite and component ref and seeing 

542 # if there is something at the component Location even without 

543 # disassembly being enabled. 

544 if ref.datasetType.isComponent(): 

545 ref = ref.makeCompositeRef() 

546 

547 # See if the ref is a composite that should be disassembled 

548 doDisassembly = self.composites.shouldBeDisassembled(ref) 

549 

550 all_info: List[Tuple[Location, Formatter, StorageClass, Optional[str]]] = [] 

551 

552 if doDisassembly: 

553 for component, componentStorage in ref.datasetType.storageClass.components.items(): 

554 compRef = ref.makeComponentRef(component) 

555 location, formatter = self._determine_put_formatter_location(compRef) 

556 all_info.append((location, formatter, componentStorage, component)) 

557 

558 else: 

559 # Always use the composite ref if no disassembly 

560 location, formatter = self._determine_put_formatter_location(ref) 

561 all_info.append((location, formatter, ref.datasetType.storageClass, None)) 

562 

563 # Convert the list of tuples to have StoredFileInfo as second element 

564 return [ 

565 ( 

566 location, 

567 StoredFileInfo( 

568 formatter=formatter, 

569 path=location.pathInStore.path, 

570 storageClass=storageClass, 

571 component=component, 

572 checksum=None, 

573 file_size=-1, 

574 dataset_id=ref.id, 

575 ), 

576 ) 

577 for location, formatter, storageClass, component in all_info 

578 ] 

579 

580 def _prepare_for_get( 

581 self, ref: DatasetRef, parameters: Optional[Mapping[str, Any]] = None 

582 ) -> List[DatastoreFileGetInformation]: 

583 """Check parameters for ``get`` and obtain formatter and 

584 location. 

585 

586 Parameters 

587 ---------- 

588 ref : `DatasetRef` 

589 Reference to the required Dataset. 

590 parameters : `dict` 

591 `StorageClass`-specific parameters that specify, for example, 

592 a slice of the dataset to be loaded. 

593 

594 Returns 

595 ------- 

596 getInfo : `list` [`DatastoreFileGetInformation`] 

597 Parameters needed to retrieve each file. 

598 """ 

599 log.debug("Retrieve %s from %s with parameters %s", ref, self.name, parameters) 

600 

601 # For trusted mode need to reset storage class. 

602 ref = self._cast_storage_class(ref) 

603 

604 # Get file metadata and internal metadata 

605 fileLocations = self._get_dataset_locations_info(ref) 

606 if not fileLocations: 

607 if not self.trustGetRequest: 

608 raise FileNotFoundError(f"Could not retrieve dataset {ref}.") 

609 # Assume the dataset is where we think it should be 

610 fileLocations = self._get_expected_dataset_locations_info(ref) 

611 

612 # The storage class we want to use eventually 

613 refStorageClass = ref.datasetType.storageClass 

614 

615 if len(fileLocations) > 1: 

616 disassembled = True 

617 

618 # If trust is involved it is possible that there will be 

619 # components listed here that do not exist in the datastore. 

620 # Explicitly check for file artifact existence and filter out any 

621 # that are missing. 

622 if self.trustGetRequest: 

623 fileLocations = [loc for loc in fileLocations if loc[0].uri.exists()] 

624 

625 # For now complain only if we have no components at all. One 

626 # component is probably a problem but we can punt that to the 

627 # assembler. 

628 if not fileLocations: 

629 raise FileNotFoundError(f"None of the component files for dataset {ref} exist.") 

630 

631 else: 

632 disassembled = False 

633 

634 # Is this a component request? 

635 refComponent = ref.datasetType.component() 

636 

637 fileGetInfo = [] 

638 for location, storedFileInfo in fileLocations: 

639 # The storage class used to write the file 

640 writeStorageClass = storedFileInfo.storageClass 

641 

642 # If this has been disassembled we need read to match the write 

643 if disassembled: 

644 readStorageClass = writeStorageClass 

645 else: 

646 readStorageClass = refStorageClass 

647 

648 formatter = get_instance_of( 

649 storedFileInfo.formatter, 

650 FileDescriptor( 

651 location, 

652 readStorageClass=readStorageClass, 

653 storageClass=writeStorageClass, 

654 parameters=parameters, 

655 ), 

656 ref.dataId, 

657 ) 

658 

659 formatterParams, notFormatterParams = formatter.segregateParameters() 

660 

661 # Of the remaining parameters, extract the ones supported by 

662 # this StorageClass (for components not all will be handled) 

663 assemblerParams = readStorageClass.filterParameters(notFormatterParams) 

664 

665 # The ref itself could be a component if the dataset was 

666 # disassembled by butler, or we disassembled in datastore and 

667 # components came from the datastore records 

668 component = storedFileInfo.component if storedFileInfo.component else refComponent 

669 

670 fileGetInfo.append( 

671 DatastoreFileGetInformation( 

672 location, 

673 formatter, 

674 storedFileInfo, 

675 assemblerParams, 

676 formatterParams, 

677 component, 

678 readStorageClass, 

679 ) 

680 ) 

681 

682 return fileGetInfo 

683 

684 def _prepare_for_put(self, inMemoryDataset: Any, ref: DatasetRef) -> Tuple[Location, Formatter]: 

685 """Check the arguments for ``put`` and obtain formatter and 

686 location. 

687 

688 Parameters 

689 ---------- 

690 inMemoryDataset : `object` 

691 The dataset to store. 

692 ref : `DatasetRef` 

693 Reference to the associated Dataset. 

694 

695 Returns 

696 ------- 

697 location : `Location` 

698 The location to write the dataset. 

699 formatter : `Formatter` 

700 The `Formatter` to use to write the dataset. 

701 

702 Raises 

703 ------ 

704 TypeError 

705 Supplied object and storage class are inconsistent. 

706 DatasetTypeNotSupportedError 

707 The associated `DatasetType` is not handled by this datastore. 

708 """ 

709 self._validate_put_parameters(inMemoryDataset, ref) 

710 return self._determine_put_formatter_location(ref) 

711 

712 def _determine_put_formatter_location(self, ref: DatasetRef) -> Tuple[Location, Formatter]: 

713 """Calculate the formatter and output location to use for put. 

714 

715 Parameters 

716 ---------- 

717 ref : `DatasetRef` 

718 Reference to the associated Dataset. 

719 

720 Returns 

721 ------- 

722 location : `Location` 

723 The location to write the dataset. 

724 formatter : `Formatter` 

725 The `Formatter` to use to write the dataset. 

726 """ 

727 # Work out output file name 

728 try: 

729 template = self.templates.getTemplate(ref) 

730 except KeyError as e: 

731 raise DatasetTypeNotSupportedError(f"Unable to find template for {ref}") from e 

732 

733 # Validate the template to protect against filenames from different 

734 # dataIds returning the same and causing overwrite confusion. 

735 template.validateTemplate(ref) 

736 

737 location = self.locationFactory.fromPath(template.format(ref)) 

738 

739 # Get the formatter based on the storage class 

740 storageClass = ref.datasetType.storageClass 

741 try: 

742 formatter = self.formatterFactory.getFormatter( 

743 ref, FileDescriptor(location, storageClass=storageClass), ref.dataId 

744 ) 

745 except KeyError as e: 

746 raise DatasetTypeNotSupportedError( 

747 f"Unable to find formatter for {ref} in datastore {self.name}" 

748 ) from e 

749 

750 # Now that we know the formatter, update the location 

751 location = formatter.makeUpdatedLocation(location) 

752 

753 return location, formatter 

754 

755 def _overrideTransferMode(self, *datasets: FileDataset, transfer: Optional[str] = None) -> Optional[str]: 

756 # Docstring inherited from base class 

757 if transfer != "auto": 

758 return transfer 

759 

760 # See if the paths are within the datastore or not 

761 inside = [self._pathInStore(d.path) is not None for d in datasets] 

762 

763 if all(inside): 

764 transfer = None 

765 elif not any(inside): 

766 # Allow ResourcePath to use its own knowledge 

767 transfer = "auto" 

768 else: 

769 # This can happen when importing from a datastore that 

770 # has had some datasets ingested using "direct" mode. 

771 # Also allow ResourcePath to sort it out, but warn about it 

772 # since the mixed case usually means the source datastore 

773 # had some datasets ingested with "direct" transfer. 

774 log.warning( 

775 "Some datasets are inside the datastore and some are outside. Using 'split' " 

776 "transfer mode. This assumes that the files outside the datastore are " 

777 "still accessible to the new butler since they will not be copied into " 

778 "the target datastore." 

779 ) 

780 transfer = "split" 

781 

782 return transfer 

783 
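A compact summary of the decision above, assuming ``transfer="auto"`` was requested (the lists are illustrative):

# inside == [True, True]   -> transfer = None     (files already under the datastore root)
# inside == [False, False] -> transfer = "auto"   (ResourcePath decides how to move them)
# inside == [True, False]  -> transfer = "split"  (mixed case; warning logged above)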

784 def _pathInStore(self, path: ResourcePathExpression) -> Optional[str]: 

785 """Return path relative to datastore root 

786 

787 Parameters 

788 ---------- 

789 path : `lsst.resources.ResourcePathExpression` 

790 Path to dataset. Can be an absolute URI. If relative, it is 

791 assumed to be relative to the datastore root. See Returns for 

792 the behavior when the path lies outside the root. 

793 

794 Returns 

795 ------- 

796 inStore : `str` 

797 Path relative to datastore root. Returns `None` if the file is 

798 outside the root. 

799 """ 

800 # Relative path will always be relative to datastore 

801 pathUri = ResourcePath(path, forceAbsolute=False) 

802 return pathUri.relative_to(self.root) 

803 
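An illustration of the ``relative_to`` behaviour this method relies on (the root and paths here are hypothetical):

root = ResourcePath("s3://bucket/repo/", forceDirectory=True)
ResourcePath("s3://bucket/repo/run1/a.fits").relative_to(root)   # -> "run1/a.fits"
ResourcePath("s3://elsewhere/a.fits").relative_to(root)          # -> None (outside the root)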

804 def _standardizeIngestPath( 

805 self, path: Union[str, ResourcePath], *, transfer: Optional[str] = None 

806 ) -> Union[str, ResourcePath]: 

807 """Standardize the path of a to-be-ingested file. 

808 

809 Parameters 

810 ---------- 

811 path : `str` or `lsst.resources.ResourcePath` 

812 Path of a file to be ingested. This parameter does not accept 

813 every type that can be used to construct a 

814 `~lsst.resources.ResourcePath`. 

815 transfer : `str`, optional 

816 How (and whether) the dataset should be added to the datastore. 

817 See `ingest` for details of transfer modes. 

818 This implementation is provided only so 

819 `NotImplementedError` can be raised if the mode is not supported; 

820 actual transfers are deferred to `_extractIngestInfo`. 

821 

822 Returns 

823 ------- 

824 path : `str` or `lsst.resources.ResourcePath` 

825 New path in what the datastore considers standard form. If an 

826 absolute URI was given, it will be returned unchanged. 

827 

828 Notes 

829 ----- 

830 Subclasses of `FileDatastore` can implement this method instead 

831 of `_prepIngest`. It should not modify the data repository or given 

832 file in any way. 

833 

834 Raises 

835 ------ 

836 NotImplementedError 

837 Raised if the datastore does not support the given transfer mode 

838 (including the case where ingest is not supported at all). 

839 FileNotFoundError 

840 Raised if one of the given files does not exist. 

841 """ 

842 if transfer not in (None, "direct", "split") + self.root.transferModes: 

843 raise NotImplementedError(f"Transfer mode {transfer} not supported.") 

844 

845 # A relative URI indicates relative to datastore root 

846 srcUri = ResourcePath(path, forceAbsolute=False) 

847 if not srcUri.isabs(): 

848 srcUri = self.root.join(path) 

849 

850 if not srcUri.exists(): 

851 raise FileNotFoundError( 

852 f"Resource at {srcUri} does not exist; note that paths to ingest " 

853 f"are assumed to be relative to {self.root} unless they are absolute." 

854 ) 

855 

856 if transfer is None: 

857 relpath = srcUri.relative_to(self.root) 

858 if not relpath: 

859 raise RuntimeError( 

860 f"Transfer is none but source file ({srcUri}) is not within datastore ({self.root})" 

861 ) 

862 

863 # Return the relative path within the datastore for internal 

864 # transfer 

865 path = relpath 

866 

867 return path 

868 

869 def _extractIngestInfo( 

870 self, 

871 path: ResourcePathExpression, 

872 ref: DatasetRef, 

873 *, 

874 formatter: Union[Formatter, Type[Formatter]], 

875 transfer: Optional[str] = None, 

876 record_validation_info: bool = True, 

877 ) -> StoredFileInfo: 

878 """Relocate (if necessary) and extract `StoredFileInfo` from a 

879 to-be-ingested file. 

880 

881 Parameters 

882 ---------- 

883 path : `lsst.resources.ResourcePathExpression` 

884 URI or path of a file to be ingested. 

885 ref : `DatasetRef` 

886 Reference for the dataset being ingested. Guaranteed to have 

887 a ``dataset_id`` that is not `None`. 

888 formatter : `type` or `Formatter` 

889 `Formatter` subclass to use for this dataset or an instance. 

890 transfer : `str`, optional 

891 How (and whether) the dataset should be added to the datastore. 

892 See `ingest` for details of transfer modes. 

893 record_validation_info : `bool`, optional 

894 If `True`, the default, the datastore can record validation 

895 information associated with the file. If `False` the datastore 

896 will not attempt to track any information such as checksums 

897 or file sizes. This can be useful if such information is tracked 

898 in an external system or if the file is to be compressed in place. 

899 It is up to the datastore whether this parameter is relevant. 

900 

901 Returns 

902 ------- 

903 info : `StoredFileInfo` 

904 Internal datastore record for this file. This will be inserted by 

905 the caller; `_extractIngestInfo` is only responsible for 

906 creating and populating the struct. 

907 

908 Raises 

909 ------ 

910 FileNotFoundError 

911 Raised if one of the given files does not exist. 

912 FileExistsError 

913 Raised if transfer is not `None` but the (internal) location the 

914 file would be moved to is already occupied. 

915 """ 

916 if self._transaction is None: 

917 raise RuntimeError("Ingest called without transaction enabled") 

918 

919 # Create URI of the source path, do not need to force a relative 

920 # path to absolute. 

921 srcUri = ResourcePath(path, forceAbsolute=False) 

922 

923 # Track whether we have read the size of the source yet 

924 have_sized = False 

925 

926 tgtLocation: Optional[Location] 

927 if transfer is None or transfer == "split": 

928 # A relative path is assumed to be relative to the datastore 

929 # in this context 

930 if not srcUri.isabs(): 

931 tgtLocation = self.locationFactory.fromPath(srcUri.ospath) 

932 else: 

933 # Work out the path in the datastore from an absolute URI 

934 # This is required to be within the datastore. 

935 pathInStore = srcUri.relative_to(self.root) 

936 if pathInStore is None and transfer is None: 

937 raise RuntimeError( 

938 f"Unexpectedly learned that {srcUri} is not within datastore {self.root}" 

939 ) 

940 if pathInStore: 

941 tgtLocation = self.locationFactory.fromPath(pathInStore) 

942 elif transfer == "split": 

943 # Outside the datastore but treat that as a direct ingest 

944 # instead. 

945 tgtLocation = None 

946 else: 

947 raise RuntimeError(f"Unexpected transfer mode encountered: {transfer} for URI {srcUri}") 

948 elif transfer == "direct": 

949 # Want to store the full URI to the resource directly in 

950 # datastore. This is useful for referring to permanent archive 

951 # storage for raw data. 

952 # Trust that people know what they are doing. 

953 tgtLocation = None 

954 else: 

955 # Work out the name we want this ingested file to have 

956 # inside the datastore 

957 tgtLocation = self._calculate_ingested_datastore_name(srcUri, ref, formatter) 

958 if not tgtLocation.uri.dirname().exists(): 

959 log.debug("Folder %s does not exist yet.", tgtLocation.uri.dirname()) 

960 tgtLocation.uri.dirname().mkdir() 

961 

962 # if we are transferring from a local file to a remote location 

963 # it may be more efficient to get the size and checksum of the 

964 # local file rather than the transferred one 

965 if record_validation_info and srcUri.isLocal: 

966 size = srcUri.size() 

967 checksum = self.computeChecksum(srcUri) if self.useChecksum else None 

968 have_sized = True 

969 

970 # Transfer the resource to the destination. 

971 # Allow overwrite of an existing file. This matches the behavior 

972 # of datastore.put() in that it trusts that registry would not 

973 # be asking to overwrite unless registry thought that the 

974 # overwrite was allowed. 

975 tgtLocation.uri.transfer_from( 

976 srcUri, transfer=transfer, transaction=self._transaction, overwrite=True 

977 ) 

978 

979 if tgtLocation is None: 

980 # This means we are using direct mode 

981 targetUri = srcUri 

982 targetPath = str(srcUri) 

983 else: 

984 targetUri = tgtLocation.uri 

985 targetPath = tgtLocation.pathInStore.path 

986 

987 # the file should exist in the datastore now 

988 if record_validation_info: 

989 if not have_sized: 

990 size = targetUri.size() 

991 checksum = self.computeChecksum(targetUri) if self.useChecksum else None 

992 else: 

993 # Not recording any file information. 

994 size = -1 

995 checksum = None 

996 

997 return StoredFileInfo( 

998 formatter=formatter, 

999 path=targetPath, 

1000 storageClass=ref.datasetType.storageClass, 

1001 component=ref.datasetType.component(), 

1002 file_size=size, 

1003 checksum=checksum, 

1004 dataset_id=ref.id, 

1005 ) 

1006 

1007 def _prepIngest(self, *datasets: FileDataset, transfer: Optional[str] = None) -> _IngestPrepData: 

1008 # Docstring inherited from Datastore._prepIngest. 

1009 filtered = [] 

1010 for dataset in datasets: 

1011 acceptable = [ref for ref in dataset.refs if self.constraints.isAcceptable(ref)] 

1012 if not acceptable: 

1013 continue 

1014 else: 

1015 dataset.refs = acceptable 

1016 if dataset.formatter is None: 

1017 dataset.formatter = self.formatterFactory.getFormatterClass(dataset.refs[0]) 

1018 else: 

1019 assert isinstance(dataset.formatter, (type, str)) 

1020 formatter_class = get_class_of(dataset.formatter) 

1021 if not issubclass(formatter_class, Formatter): 

1022 raise TypeError(f"Requested formatter {dataset.formatter} is not a Formatter class.") 

1023 dataset.formatter = formatter_class 

1024 dataset.path = self._standardizeIngestPath(dataset.path, transfer=transfer) 

1025 filtered.append(dataset) 

1026 return _IngestPrepData(filtered) 

1027 

1028 @transactional 

1029 def _finishIngest( 

1030 self, 

1031 prepData: Datastore.IngestPrepData, 

1032 *, 

1033 transfer: Optional[str] = None, 

1034 record_validation_info: bool = True, 

1035 ) -> None: 

1036 # Docstring inherited from Datastore._finishIngest. 

1037 refsAndInfos = [] 

1038 progress = Progress("lsst.daf.butler.datastores.FileDatastore.ingest", level=logging.DEBUG) 

1039 for dataset in progress.wrap(prepData.datasets, desc="Ingesting dataset files"): 

1040 # Do ingest as if the first dataset ref is associated with the file 

1041 info = self._extractIngestInfo( 

1042 dataset.path, 

1043 dataset.refs[0], 

1044 formatter=dataset.formatter, 

1045 transfer=transfer, 

1046 record_validation_info=record_validation_info, 

1047 ) 

1048 refsAndInfos.extend([(ref, info) for ref in dataset.refs]) 

1049 self._register_datasets(refsAndInfos) 

1050 

1051 def _calculate_ingested_datastore_name( 

1052 self, 

1053 srcUri: ResourcePath, 

1054 ref: DatasetRef, 

1055 formatter: Formatter | Type[Formatter] | None = None, 

1056 ) -> Location: 

1057 """Given a source URI and a DatasetRef, determine the name the 

1058 dataset will have inside the datastore. 

1059 

1060 Parameters 

1061 ---------- 

1062 srcUri : `lsst.resources.ResourcePath` 

1063 URI to the source dataset file. 

1064 ref : `DatasetRef` 

1065 Ref associated with the newly-ingested dataset artifact. This 

1066 is used to determine the name within the datastore. 

1067 formatter : `Formatter` or `Formatter` class, optional 

1068 Formatter to use for validation. Can be a class or an instance. 

1069 No validation of the file extension is performed if the 

1070 ``formatter`` is `None`. This can be used if the caller knows 

1071 that the source URI and target URI will use the same formatter. 

1072 

1073 Returns 

1074 ------- 

1075 location : `Location` 

1076 Target location for the newly-ingested dataset. 

1077 """ 

1078 # Ingesting a file from outside the datastore. 

1079 # This involves a new name. 

1080 template = self.templates.getTemplate(ref) 

1081 location = self.locationFactory.fromPath(template.format(ref)) 

1082 

1083 # Get the extension 

1084 ext = srcUri.getExtension() 

1085 

1086 # Update the destination to include that extension 

1087 location.updateExtension(ext) 

1088 

1089 # Ask the formatter to validate this extension 

1090 if formatter is not None: 

1091 formatter.validateExtension(location) 

1092 

1093 return location 

1094 
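Illustrative walk-through of the naming performed above (the template output and source file are hypothetical):

# srcUri  = ResourcePath("/staging/night1/exp_000123.fits")
# template.format(ref)            -> "run1/raw/exp_000123"   (no extension yet)
# location.updateExtension(".fits")                          # taken from srcUri.getExtension()
# resulting path in the datastore -> "run1/raw/exp_000123.fits"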

1095 def _write_in_memory_to_artifact(self, inMemoryDataset: Any, ref: DatasetRef) -> StoredFileInfo: 

1096 """Write out in memory dataset to datastore. 

1097 

1098 Parameters 

1099 ---------- 

1100 inMemoryDataset : `object` 

1101 Dataset to write to datastore. 

1102 ref : `DatasetRef` 

1103 Registry information associated with this dataset. 

1104 

1105 Returns 

1106 ------- 

1107 info : `StoredFileInfo` 

1108 Information describing the artifact written to the datastore. 

1109 """ 

1110 # May need to coerce the in memory dataset to the correct 

1111 # python type, but first we need to make sure the storage class 

1112 # reflects the one defined in the data repository. 

1113 ref = self._cast_storage_class(ref) 

1114 inMemoryDataset = ref.datasetType.storageClass.coerce_type(inMemoryDataset) 

1115 

1116 location, formatter = self._prepare_for_put(inMemoryDataset, ref) 

1117 uri = location.uri 

1118 

1119 if not uri.dirname().exists(): 

1120 log.debug("Folder %s does not exist yet so creating it.", uri.dirname()) 

1121 uri.dirname().mkdir() 

1122 

1123 if self._transaction is None: 

1124 raise RuntimeError("Attempting to write artifact without transaction enabled") 

1125 

1126 def _removeFileExists(uri: ResourcePath) -> None: 

1127 """Remove a file and do not complain if it is not there. 

1128 

1129 This is important since a formatter might fail before the file 

1130 is written and we should not confuse people by writing spurious 

1131 error messages to the log. 

1132 """ 

1133 try: 

1134 uri.remove() 

1135 except FileNotFoundError: 

1136 pass 

1137 

1138 # Register a callback to try to delete the uploaded data if 

1139 # something fails below 

1140 self._transaction.registerUndo("artifactWrite", _removeFileExists, uri) 

1141 

1142 data_written = False 

1143 if not uri.isLocal: 

1144 # This is a remote URI. Some datasets can be serialized directly 

1145 # to bytes and sent to the remote datastore without writing a 

1146 # file. If the dataset is intended to be saved to the cache 

1147 # a file is always written and direct write to the remote 

1148 # datastore is bypassed. 

1149 if not self.cacheManager.should_be_cached(ref): 

1150 try: 

1151 serializedDataset = formatter.toBytes(inMemoryDataset) 

1152 except NotImplementedError: 

1153 # Fallback to the file writing option. 

1154 pass 

1155 except Exception as e: 

1156 raise RuntimeError( 

1157 f"Failed to serialize dataset {ref} of type {type(inMemoryDataset)} to bytes." 

1158 ) from e 

1159 else: 

1160 log.debug("Writing bytes directly to %s", uri) 

1161 uri.write(serializedDataset, overwrite=True) 

1162 log.debug("Successfully wrote bytes directly to %s", uri) 

1163 data_written = True 

1164 

1165 if not data_written: 

1166 # Did not write the bytes directly to object store so instead 

1167 # write to temporary file. Always write to a temporary even if 

1168 # using a local file system -- that gives us atomic writes. 

1169 # If a process is killed as the file is being written, we do not 

1170 # want it to remain in the correct place but in a corrupt state. 

1171 # For local files write to the output directory not temporary dir. 

1172 prefix = uri.dirname() if uri.isLocal else None 

1173 with ResourcePath.temporary_uri(suffix=uri.getExtension(), prefix=prefix) as temporary_uri: 

1174 # Need to configure the formatter to write to a different 

1175 # location and that needs us to overwrite internals 

1176 log.debug("Writing dataset to temporary location at %s", temporary_uri) 

1177 with formatter._updateLocation(Location(None, temporary_uri)): 

1178 try: 

1179 formatter.write(inMemoryDataset) 

1180 except Exception as e: 

1181 raise RuntimeError( 

1182 f"Failed to serialize dataset {ref} of type" 

1183 f" {type(inMemoryDataset)} to " 

1184 f"temporary location {temporary_uri}" 

1185 ) from e 

1186 

1187 # Use move for a local file since that becomes an efficient 

1188 # os.rename. For remote resources we use copy to allow the 

1189 # file to be cached afterwards. 

1190 transfer = "move" if uri.isLocal else "copy" 

1191 

1192 uri.transfer_from(temporary_uri, transfer=transfer, overwrite=True) 

1193 

1194 if transfer == "copy": 

1195 # Cache if required 

1196 self.cacheManager.move_to_cache(temporary_uri, ref) 

1197 

1198 log.debug("Successfully wrote dataset to %s via a temporary file.", uri) 

1199 

1200 # URI is needed to resolve which ingest case we are dealing with 

1201 return self._extractIngestInfo(uri, ref, formatter=formatter) 

1202 

1203 def _read_artifact_into_memory( 

1204 self, 

1205 getInfo: DatastoreFileGetInformation, 

1206 ref: DatasetRef, 

1207 isComponent: bool = False, 

1208 cache_ref: Optional[DatasetRef] = None, 

1209 ) -> Any: 

1210 """Read the artifact from datastore into in memory object. 

1211 

1212 Parameters 

1213 ---------- 

1214 getInfo : `DatastoreFileGetInformation` 

1215 Information about the artifact within the datastore. 

1216 ref : `DatasetRef` 

1217 The registry information associated with this artifact. 

1218 isComponent : `bool` 

1219 Flag to indicate if a component is being read from this artifact. 

1220 cache_ref : `DatasetRef`, optional 

1221 The DatasetRef to use when looking up the file in the cache. 

1222 This ref must have the same ID as the supplied ref but can 

1223 be a parent ref or component ref to indicate to the cache whether 

1224 a composite file is being requested from the cache or a component 

1225 file. Without this the cache will default to the supplied ref but 

1226 it can get confused with read-only derived components for 

1227 disassembled composites. 

1228 

1229 Returns 

1230 ------- 

1231 inMemoryDataset : `object` 

1232 The artifact as a python object. 

1233 """ 

1234 location = getInfo.location 

1235 uri = location.uri 

1236 log.debug("Accessing data from %s", uri) 

1237 

1238 if cache_ref is None: 

1239 cache_ref = ref 

1240 if cache_ref.id != ref.id: 

1241 raise ValueError( 

1242 "The supplied cache dataset ref refers to a different dataset than expected:" 

1243 f" {ref.id} != {cache_ref.id}" 

1244 ) 

1245 

1246 # Cannot recalculate checksum but can compare size as a quick check 

1247 # Do not do this if the size is negative since that indicates 

1248 # we do not know. 

1249 recorded_size = getInfo.info.file_size 

1250 resource_size = uri.size() 

1251 if recorded_size >= 0 and resource_size != recorded_size: 

1252 raise RuntimeError( 

1253 "Integrity failure in Datastore. " 

1254 f"Size of file {uri} ({resource_size}) " 

1255 f"does not match size recorded in registry of {recorded_size}" 

1256 ) 

1257 

1258 # For the general case we have choices for how to proceed. 

1259 # 1. Always use a local file (downloading the remote resource to a 

1260 # temporary file if needed). 

1261 # 2. Use a threshold size and read into memory and use bytes. 

1262 # Use both for now with an arbitrary hand off size. 

1263 # This allows small datasets to be downloaded from remote object 

1264 # stores without requiring a temporary file. 

1265 

1266 formatter = getInfo.formatter 

1267 nbytes_max = 10_000_000 # Arbitrary number that we can tune 

1268 if resource_size <= nbytes_max and formatter.can_read_bytes(): 

1269 with self.cacheManager.find_in_cache(cache_ref, uri.getExtension()) as cached_file: 

1270 if cached_file is not None: 

1271 desired_uri = cached_file 

1272 msg = f" (cached version of {uri})" 

1273 else: 

1274 desired_uri = uri 

1275 msg = "" 

1276 with time_this(log, msg="Reading bytes from %s%s", args=(desired_uri, msg)): 

1277 serializedDataset = desired_uri.read() 

1278 log.debug( 

1279 "Deserializing %s from %d bytes from location %s with formatter %s", 

1280 f"component {getInfo.component}" if isComponent else "", 

1281 len(serializedDataset), 

1282 uri, 

1283 formatter.name(), 

1284 ) 

1285 try: 

1286 result = formatter.fromBytes( 

1287 serializedDataset, component=getInfo.component if isComponent else None 

1288 ) 

1289 except Exception as e: 

1290 raise ValueError( 

1291 f"Failure from formatter '{formatter.name()}' for dataset {ref.id}" 

1292 f" ({ref.datasetType.name} from {uri}): {e}" 

1293 ) from e 

1294 else: 

1295 # Read from file. 

1296 

1297 # Have to update the Location associated with the formatter 

1298 # because formatter.read does not allow an override. 

1299 # This could be improved. 

1300 location_updated = False 

1301 msg = "" 

1302 

1303 # First check in cache for local version. 

1304 # The cache will only be relevant for remote resources but 

1305 # no harm in always asking. Context manager ensures that cache 

1306 # file is not deleted during cache expiration. 

1307 with self.cacheManager.find_in_cache(cache_ref, uri.getExtension()) as cached_file: 

1308 if cached_file is not None: 

1309 msg = f"(via cache read of remote file {uri})" 

1310 uri = cached_file 

1311 location_updated = True 

1312 

1313 with uri.as_local() as local_uri: 

1314 can_be_cached = False 

1315 if uri != local_uri: 

1316 # URI was remote and file was downloaded 

1317 cache_msg = "" 

1318 location_updated = True 

1319 

1320 if self.cacheManager.should_be_cached(cache_ref): 

1321 # In this scenario we want to ask if the downloaded 

1322 # file should be cached but we should not cache 

1323 # it until after we've used it (to ensure it can't 

1324 # be expired whilst we are using it). 

1325 can_be_cached = True 

1326 

1327 # Say that it is "likely" to be cached because 

1328 # if the formatter read fails we will not be 

1329 # caching this file. 

1330 cache_msg = " and likely cached" 

1331 

1332 msg = f"(via download to local file{cache_msg})" 

1333 

1334 # Calculate the (possibly) new location for the formatter 

1335 # to use. 

1336 newLocation = Location(*local_uri.split()) if location_updated else None 

1337 

1338 log.debug( 

1339 "Reading%s from location %s %s with formatter %s", 

1340 f" component {getInfo.component}" if isComponent else "", 

1341 uri, 

1342 msg, 

1343 formatter.name(), 

1344 ) 

1345 try: 

1346 with formatter._updateLocation(newLocation): 

1347 with time_this( 

1348 log, 

1349 msg="Reading%s from location %s %s with formatter %s", 

1350 args=( 

1351 f" component {getInfo.component}" if isComponent else "", 

1352 uri, 

1353 msg, 

1354 formatter.name(), 

1355 ), 

1356 ): 

1357 result = formatter.read(component=getInfo.component if isComponent else None) 

1358 except Exception as e: 

1359 raise ValueError( 

1360 f"Failure from formatter '{formatter.name()}' for dataset {ref.id}" 

1361 f" ({ref.datasetType.name} from {uri}): {e}" 

1362 ) from e 

1363 

1364 # File was read successfully so can move to cache 

1365 if can_be_cached: 

1366 self.cacheManager.move_to_cache(local_uri, cache_ref) 

1367 

1368 return self._post_process_get( 

1369 result, ref.datasetType.storageClass, getInfo.assemblerParams, isComponent=isComponent 

1370 ) 

1371 
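Rough sketch of the size-based hand-off implemented above (the 10_000_000 byte threshold is the arbitrary value noted in the code; the sizes below are made up):

# resource_size = 2_000_000 and formatter.can_read_bytes()   -> read bytes directly, no temporary file
# resource_size = 50_000_000, or formatter cannot read bytes -> cache lookup / as_local() download, then formatter.read()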

1372 def knows(self, ref: DatasetRef) -> bool: 

1373 """Check if the dataset is known to the datastore. 

1374 

1375 Does not check for existence of any artifact. 

1376 

1377 Parameters 

1378 ---------- 

1379 ref : `DatasetRef` 

1380 Reference to the required dataset. 

1381 

1382 Returns 

1383 ------- 

1384 exists : `bool` 

1385 `True` if the dataset is known to the datastore. 

1386 """ 

1387 fileLocations = self._get_dataset_locations_info(ref) 

1388 if fileLocations: 

1389 return True 

1390 return False 

1391 

1392 def knows_these(self, refs: Iterable[DatasetRef]) -> dict[DatasetRef, bool]: 

1393 # Docstring inherited from the base class. 

1394 

1395 # The records themselves. Could be missing some entries. 

1396 records = self._get_stored_records_associated_with_refs(refs) 

1397 

1398 return {ref: ref.id in records for ref in refs} 

1399 

1400 def _process_mexists_records( 

1401 self, 

1402 id_to_ref: Dict[DatasetId, DatasetRef], 

1403 records: Dict[DatasetId, List[StoredFileInfo]], 

1404 all_required: bool, 

1405 artifact_existence: Optional[Dict[ResourcePath, bool]] = None, 

1406 ) -> Dict[DatasetRef, bool]: 

1407 """Helper function for mexists that checks the given records. 

1408 

1409 Parameters 

1410 ---------- 

1411 id_to_ref : `dict` of [`DatasetId`, `DatasetRef`] 

1412 Mapping of the dataset ID to the dataset ref itself. 

1413 records : `dict` of [`DatasetId`, `list` of `StoredFileInfo`] 

1414 Records as generally returned by 

1415 ``_get_stored_records_associated_with_refs``. 

1416 all_required : `bool` 

1417 Flag to indicate whether all artifacts associated with a 

1418 dataset ID must exist for the dataset to be considered present. 

1419 artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`] 

1420 Optional mapping of datastore artifact to existence. Updated by 

1421 this method with details of all artifacts tested. Can be `None` 

1422 if the caller is not interested. 

1423 

1424 Returns 

1425 ------- 

1426 existence : `dict` of [`DatasetRef`, `bool`] 

1427 Mapping from dataset to boolean indicating existence. 

1428 """ 

1429 # The URIs to be checked and a mapping of those URIs to 

1430 # the dataset ID. 

1431 uris_to_check: List[ResourcePath] = [] 

1432 location_map: Dict[ResourcePath, DatasetId] = {} 

1433 

1434 location_factory = self.locationFactory 

1435 

1436 uri_existence: Dict[ResourcePath, bool] = {} 

1437 for ref_id, infos in records.items(): 

1438 # Key is the dataset ID, value is a list of StoredFileInfo 

1439 uris = [info.file_location(location_factory).uri for info in infos] 

1440 location_map.update({uri: ref_id for uri in uris}) 

1441 

1442 # Check the local cache directly for a dataset corresponding 

1443 # to the remote URI. 

1444 if self.cacheManager.file_count > 0: 

1445 ref = id_to_ref[ref_id] 

1446 for uri, storedFileInfo in zip(uris, infos): 

1447 check_ref = ref 

1448 if not ref.datasetType.isComponent() and (component := storedFileInfo.component): 

1449 check_ref = ref.makeComponentRef(component) 

1450 if self.cacheManager.known_to_cache(check_ref, uri.getExtension()): 

1451 # Proxy for URI existence. 

1452 uri_existence[uri] = True 

1453 else: 

1454 uris_to_check.append(uri) 

1455 else: 

1456 # Check all of them. 

1457 uris_to_check.extend(uris) 

1458 

1459 if artifact_existence is not None: 

1460 # If a URI has already been checked remove it from the list 

1461 # and immediately add the status to the output dict. 

1462 filtered_uris_to_check = [] 

1463 for uri in uris_to_check: 

1464 if uri in artifact_existence: 

1465 uri_existence[uri] = artifact_existence[uri] 

1466 else: 

1467 filtered_uris_to_check.append(uri) 

1468 uris_to_check = filtered_uris_to_check 

1469 

1470 # Results. 

1471 dataset_existence: Dict[DatasetRef, bool] = {} 

1472 

1473 uri_existence.update(ResourcePath.mexists(uris_to_check)) 

1474 for uri, exists in uri_existence.items(): 

1475 dataset_id = location_map[uri] 

1476 ref = id_to_ref[dataset_id] 

1477 

1478 # Disassembled composite needs to check all locations. 

1479 # all_required indicates whether all need to exist or not. 

1480 if ref in dataset_existence: 

1481 if all_required: 

1482 exists = dataset_existence[ref] and exists 

1483 else: 

1484 exists = dataset_existence[ref] or exists 

1485 dataset_existence[ref] = exists 

1486 

1487 if artifact_existence is not None: 

1488 artifact_existence.update(uri_existence) 

1489 

1490 return dataset_existence 

1491 

1492 def mexists( 

1493 self, refs: Iterable[DatasetRef], artifact_existence: Optional[Dict[ResourcePath, bool]] = None 

1494 ) -> Dict[DatasetRef, bool]: 

1495 """Check the existence of multiple datasets at once. 

1496 

1497 Parameters 

1498 ---------- 

1499 refs : iterable of `DatasetRef` 

1500 The datasets to be checked. 

1501 artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`] 

1502 Optional mapping of datastore artifact to existence. Updated by 

1503 this method with details of all artifacts tested. Can be `None` 

1504 if the caller is not interested. 

1505 

1506 Returns 

1507 ------- 

1508 existence : `dict` of [`DatasetRef`, `bool`] 

1509 Mapping from dataset to boolean indicating existence. 

1510 

1511 Notes 

1512 ----- 

1513 To minimize potentially costly remote existence checks, the local 

1514 cache is checked as a proxy for existence. If a file for this 

1515 `DatasetRef` does exist no check is done for the actual URI. This 

1516 could result in possibly unexpected behavior if the dataset itself 

1517 has been removed from the datastore by another process whilst it is 

1518 still in the cache. 

1519 """ 

1520 chunk_size = 10_000 

1521 dataset_existence: Dict[DatasetRef, bool] = {} 

1522 log.debug("Checking for the existence of multiple artifacts in datastore in chunks of %d", chunk_size) 

1523 n_found_total = 0 

1524 n_checked = 0 

1525 n_chunks = 0 

1526 for chunk in chunk_iterable(refs, chunk_size=chunk_size): 

1527 chunk_result = self._mexists(chunk, artifact_existence) 

1528 

1529 # The log message level and content depend on how many 

1530 # datasets we are processing. 

1531 n_results = len(chunk_result) 

1532 

1533 # Use verbose logging to ensure that messages can be seen 

1534 # easily if many refs are being checked. 

1535 log_threshold = VERBOSE 

1536 n_checked += n_results 

1537 

1538 # This sum can take some time so only do it if we know the 

1539 # result is going to be used. 

1540 n_found = 0 

1541 if log.isEnabledFor(log_threshold): 

1542 # Can treat the booleans as 0, 1 integers and sum them. 

1543 n_found = sum(chunk_result.values()) 

1544 n_found_total += n_found 

1545 

1546 # We are deliberately not trying to count the number of refs 

1547 # provided in case it's in the millions. This means there is a 

1548 # situation where the number of refs exactly matches the chunk 

1549 # size and we will switch to the multi-chunk path even though 

1550 # we only have a single chunk. 

1551 if n_results < chunk_size and n_chunks == 0: 

1552 # Single chunk will be processed so we can provide more detail. 

1553 if n_results == 1: 

1554 ref = list(chunk_result)[0] 

1555 # Use debug logging to be consistent with `exists()`. 

1556 log.debug( 

1557 "Calling mexists() with single ref that does%s exist (%s).", 

1558 "" if chunk_result[ref] else " not", 

1559 ref, 

1560 ) 

1561 else: 

1562 # Single chunk but multiple files. Summarize. 

1563 log.log( 

1564 log_threshold, 

1565 "Number of datasets found in datastore: %d out of %d datasets checked.", 

1566 n_found, 

1567 n_checked, 

1568 ) 

1569 

1570 else: 

1571 # Use incremental verbose logging when we have multiple chunks. 

1572 log.log( 

1573 log_threshold, 

1574 "Number of datasets found in datastore for chunk %d: %d out of %d checked " 

1575 "(running total from all chunks so far: %d found out of %d checked)", 

1576 n_chunks, 

1577 n_found, 

1578 n_results, 

1579 n_found_total, 

1580 n_checked, 

1581 ) 

1582 dataset_existence.update(chunk_result) 

1583 n_chunks += 1 

1584 

1585 return dataset_existence 

1586 
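
A minimal usage sketch for the bulk existence API above. It assumes a configured FileDatastore instance (``datastore``) and resolved `DatasetRef` objects obtained elsewhere, e.g. from a registry query; those names are placeholders, not part of this module. The optional ``artifact_existence`` dictionary is shared so that repeated calls do not re-check the same URIs.

    from typing import Dict, Iterable, List

    from lsst.daf.butler import DatasetRef, Datastore
    from lsst.resources import ResourcePath

    def find_missing_datasets(datastore: Datastore, refs: Iterable[DatasetRef]) -> List[DatasetRef]:
        # URI-level cache; mexists() updates it with every artifact it checks,
        # so later calls can reuse the results.
        artifact_existence: Dict[ResourcePath, bool] = {}
        existence = datastore.mexists(refs, artifact_existence)
        return [ref for ref, found in existence.items() if not found]
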

1587 def _mexists( 

1588 self, refs: Sequence[DatasetRef], artifact_existence: Optional[Dict[ResourcePath, bool]] = None 

1589 ) -> Dict[DatasetRef, bool]: 

1590 """Check the existence of multiple datasets at once. 

1591 

1592 Parameters 

1593 ---------- 

1594 refs : iterable of `DatasetRef` 

1595 The datasets to be checked. 

1596 artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`] 

1597 Optional mapping of datastore artifact to existence. Updated by 

1598 this method with details of all artifacts tested. Can be `None` 

1599 if the caller is not interested. 

1600 

1601 Returns 

1602 ------- 

1603 existence : `dict` of [`DatasetRef`, `bool`] 

1604 Mapping from dataset to boolean indicating existence. 

1605 """ 

1606 # Make a mapping from refs with the internal storage class to the given 

1607 # refs that may have a different one. We'll use the internal refs 

1608 # throughout this method and convert back at the very end. 

1609 internal_ref_to_input_ref = {self._cast_storage_class(ref): ref for ref in refs} 

1610 

1611 # Need a mapping of dataset_id to (internal) dataset ref since some 

1612 # internal APIs work with dataset_id. 

1613 id_to_ref = {ref.id: ref for ref in internal_ref_to_input_ref} 

1614 

1615 # Set of all IDs we are checking for. 

1616 requested_ids = set(id_to_ref.keys()) 

1617 

1618 # The records themselves. Could be missing some entries. 

1619 records = self._get_stored_records_associated_with_refs(id_to_ref.values()) 

1620 

1621 dataset_existence = self._process_mexists_records( 

1622 id_to_ref, records, True, artifact_existence=artifact_existence 

1623 ) 

1624 

1625 # Set of IDs that have been handled. 

1626 handled_ids = {ref.id for ref in dataset_existence.keys()} 

1627 

1628 missing_ids = requested_ids - handled_ids 

1629 if missing_ids: 

1630 dataset_existence.update( 

1631 self._mexists_check_expected( 

1632 [id_to_ref[missing] for missing in missing_ids], artifact_existence 

1633 ) 

1634 ) 

1635 

1636 return { 

1637 internal_ref_to_input_ref[internal_ref]: existence 

1638 for internal_ref, existence in dataset_existence.items() 

1639 } 

1640 

1641 def _mexists_check_expected( 

1642 self, refs: Sequence[DatasetRef], artifact_existence: Optional[Dict[ResourcePath, bool]] = None 

1643 ) -> Dict[DatasetRef, bool]: 

1644 """Check existence of refs that are not known to datastore. 

1645 

1646 Parameters 

1647 ---------- 

1648 refs : iterable of `DatasetRef` 

1649 The datasets to be checked. These are assumed not to be known 

1650 to datastore. 

1651 artifact_existence : `dict` [`lsst.resources.ResourcePath`, `bool`] 

1652 Optional mapping of datastore artifact to existence. Updated by 

1653 this method with details of all artifacts tested. Can be `None` 

1654 if the caller is not interested. 

1655 

1656 Returns 

1657 ------- 

1658 existence : `dict` of [`DatasetRef`, `bool`] 

1659 Mapping from dataset to boolean indicating existence. 

1660 """ 

1661 dataset_existence: Dict[DatasetRef, bool] = {} 

1662 if not self.trustGetRequest: 

1663 # Must assume these do not exist 

1664 for ref in refs: 

1665 dataset_existence[ref] = False 

1666 else: 

1667 log.debug( 

1668 "%d datasets were not known to datastore during initial existence check.", 

1669 len(refs), 

1670 ) 

1671 

1672 # Construct data structure identical to that returned 

1673 # by _get_stored_records_associated_with_refs() but using 

1674 # guessed names. 

1675 records = {} 

1676 id_to_ref = {} 

1677 for missing_ref in refs: 

1678 expected = self._get_expected_dataset_locations_info(missing_ref) 

1679 dataset_id = missing_ref.id 

1680 records[dataset_id] = [info for _, info in expected] 

1681 id_to_ref[dataset_id] = missing_ref 

1682 

1683 dataset_existence.update( 

1684 self._process_mexists_records( 

1685 id_to_ref, 

1686 records, 

1687 False, 

1688 artifact_existence=artifact_existence, 

1689 ) 

1690 ) 

1691 

1692 return dataset_existence 

1693 

1694 def exists(self, ref: DatasetRef) -> bool: 

1695 """Check if the dataset exists in the datastore. 

1696 

1697 Parameters 

1698 ---------- 

1699 ref : `DatasetRef` 

1700 Reference to the required dataset. 

1701 

1702 Returns 

1703 ------- 

1704 exists : `bool` 

1705 `True` if the entity exists in the `Datastore`. 

1706 

1707 Notes 

1708 ----- 

1709 The local cache is checked as a proxy for existence in the remote 

1710 object store. It is possible that another process on a different 

1711 compute node could remove the file from the object store even 

1712 though it is present in the local cache. 

1713 """ 

1714 ref = self._cast_storage_class(ref) 

1715 fileLocations = self._get_dataset_locations_info(ref) 

1716 

1717 # if we are being asked to trust that registry might not be correct 

1718 # we ask for the expected locations and check them explicitly 

1719 if not fileLocations: 

1720 if not self.trustGetRequest: 

1721 return False 

1722 

1723 # First check the cache. If it is not found we must check 

1724 # the datastore itself. Assume that any component in the cache 

1725 # means that the dataset does exist somewhere. 

1726 if self.cacheManager.known_to_cache(ref): 

1727 return True 

1728 

1729 # When we are guessing a dataset location we can not check 

1730 # for the existence of every component since we can not 

1731 # know if every component was written. Instead we check 

1732 # for the existence of any of the expected locations. 

1733 for location, _ in self._get_expected_dataset_locations_info(ref): 

1734 if self._artifact_exists(location): 

1735 return True 

1736 return False 

1737 

1738 # All listed artifacts must exist. 

1739 for location, storedFileInfo in fileLocations: 

1740 # Checking in cache needs the component ref. 

1741 check_ref = ref 

1742 if not ref.datasetType.isComponent() and (component := storedFileInfo.component): 

1743 check_ref = ref.makeComponentRef(component) 

1744 if self.cacheManager.known_to_cache(check_ref, location.getExtension()): 

1745 continue 

1746 

1747 if not self._artifact_exists(location): 

1748 return False 

1749 

1750 return True 

1751 
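
A short sketch contrasting the single-dataset check with the bulk API, assuming a configured datastore and resolved refs (hypothetical names). For more than a handful of datasets the bulk call avoids one remote round trip per artifact.

    from typing import List

    from lsst.daf.butler import DatasetRef, Datastore

    def all_present(datastore: Datastore, refs: List[DatasetRef]) -> bool:
        if len(refs) == 1:
            # Single dataset: the local cache may answer without a remote check.
            return datastore.exists(refs[0])
        # Many datasets: prefer the bulk API.
        return all(datastore.mexists(refs).values())
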

1752 def getURIs(self, ref: DatasetRef, predict: bool = False) -> DatasetRefURIs: 

1753 """Return URIs associated with dataset. 

1754 

1755 Parameters 

1756 ---------- 

1757 ref : `DatasetRef` 

1758 Reference to the required dataset. 

1759 predict : `bool`, optional 

1760 If the datastore does not know about the dataset, should it 

1761 return a predicted URI or not? 

1762 

1763 Returns 

1764 ------- 

1765 uris : `DatasetRefURIs` 

1766 The URI to the primary artifact associated with this dataset (if 

1767 the dataset was disassembled within the datastore this may be 

1768 `None`), and the URIs to any components associated with the dataset 

1769 artifact. (can be empty if there are no components). 

1770 """ 

1771 many = self.getManyURIs([ref], predict=predict, allow_missing=False) 

1772 return many[ref] 

1773 

1774 def getURI(self, ref: DatasetRef, predict: bool = False) -> ResourcePath: 

1775 """URI to the Dataset. 

1776 

1777 Parameters 

1778 ---------- 

1779 ref : `DatasetRef` 

1780 Reference to the required Dataset. 

1781 predict : `bool` 

1782 If `True`, allow URIs to be returned of datasets that have not 

1783 been written. 

1784 

1785 Returns 

1786 ------- 

1787 uri : `lsst.resources.ResourcePath` 

1788 URI pointing to the dataset within the datastore. If the 

1789 dataset does not exist in the datastore, and if ``predict`` is 

1790 `True`, the URI will be a prediction and will include a URI 

1791 fragment "#predicted". 

1792 If the datastore does not have entities that relate well 

1793 to the concept of a URI the returned URI will be 

1794 descriptive. The returned URI is not guaranteed to be obtainable. 

1795 

1796 Raises 

1797 ------ 

1798 FileNotFoundError 

1799 Raised if a URI has been requested for a dataset that does not 

1800 exist and guessing is not allowed. 

1801 RuntimeError 

1802 Raised if a request is made for a single URI but multiple URIs 

1803 are associated with this dataset. 

1804 

1805 Notes 

1806 ----- 

1807 When a predicted URI is requested an attempt will be made to form 

1808 a reasonable URI based on file templates and the expected formatter. 

1809 """ 

1810 primary, components = self.getURIs(ref, predict) 

1811 if primary is None or components: 

1812 raise RuntimeError( 

1813 f"Dataset ({ref}) includes distinct URIs for components. Use Datastore.getURIs() instead." 

1814 ) 

1815 return primary 

1816 
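
A sketch of collecting every URI associated with a dataset, whether or not it has been written yet, assuming a configured datastore and a resolved ref (hypothetical names). Predicted URIs carry the "#predicted" fragment described above.

    from typing import List

    from lsst.daf.butler import DatasetRef, Datastore
    from lsst.resources import ResourcePath

    def describe_locations(datastore: Datastore, ref: DatasetRef) -> List[ResourcePath]:
        # predict=True returns template-based guesses for unwritten datasets.
        uris = datastore.getURIs(ref, predict=True)
        locations: List[ResourcePath] = []
        if uris.primaryURI is not None:
            locations.append(uris.primaryURI)
        locations.extend(uris.componentURIs.values())
        return locations
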

1817 def _predict_URIs( 

1818 self, 

1819 ref: DatasetRef, 

1820 ) -> DatasetRefURIs: 

1821 """Predict the URIs of a dataset ref. 

1822 

1823 Parameters 

1824 ---------- 

1825 ref : `DatasetRef` 

1826 Reference to the required Dataset. 

1827 

1828 Returns 

1829 ------- 

1830 uris : `DatasetRefURIs` 

1831 Primary and component URIs. URIs will contain a URI fragment 

1832 "#predicted". 

1833 """ 

1834 uris = DatasetRefURIs() 

1835 

1836 if self.composites.shouldBeDisassembled(ref): 

1837 for component, _ in ref.datasetType.storageClass.components.items(): 

1838 comp_ref = ref.makeComponentRef(component) 

1839 comp_location, _ = self._determine_put_formatter_location(comp_ref) 

1840 

1841 # Add the "#predicted" URI fragment to indicate this is a 

1842 # guess 

1843 uris.componentURIs[component] = ResourcePath(comp_location.uri.geturl() + "#predicted") 

1844 

1845 else: 

1846 location, _ = self._determine_put_formatter_location(ref) 

1847 

1848 # Add the "#predicted" URI fragment to indicate this is a guess 

1849 uris.primaryURI = ResourcePath(location.uri.geturl() + "#predicted") 

1850 

1851 return uris 

1852 

1853 def getManyURIs( 

1854 self, 

1855 refs: Iterable[DatasetRef], 

1856 predict: bool = False, 

1857 allow_missing: bool = False, 

1858 ) -> Dict[DatasetRef, DatasetRefURIs]: 

1859 # Docstring inherited 

1860 

1861 uris: Dict[DatasetRef, DatasetRefURIs] = {} 

1862 

1863 records = self._get_stored_records_associated_with_refs(refs) 

1864 records_keys = records.keys() 

1865 

1866 existing_refs = tuple(ref for ref in refs if ref.id in records_keys) 

1867 missing_refs = tuple(ref for ref in refs if ref.id not in records_keys) 

1868 

1869 # Have to handle trustGetRequest mode by checking for the existence 

1870 # of the missing refs on disk. 

1871 if missing_refs: 

1872 dataset_existence = self._mexists_check_expected(missing_refs, None) 

1873 really_missing = set() 

1874 not_missing = set() 

1875 for ref, exists in dataset_existence.items(): 

1876 if exists: 

1877 not_missing.add(ref) 

1878 else: 

1879 really_missing.add(ref) 

1880 

1881 if not_missing: 

1882 # Need to recalculate the missing/existing split. 

1883 existing_refs = existing_refs + tuple(not_missing) 

1884 missing_refs = tuple(really_missing) 

1885 

1886 for ref in missing_refs: 

1887 # if this has never been written then we have to guess 

1888 if not predict: 

1889 if not allow_missing: 

1890 raise FileNotFoundError("Dataset {} not in this datastore.".format(ref)) 

1891 else: 

1892 uris[ref] = self._predict_URIs(ref) 

1893 

1894 for ref in existing_refs: 

1895 file_infos = records[ref.id] 

1896 file_locations = [(i.file_location(self.locationFactory), i) for i in file_infos] 

1897 uris[ref] = self._locations_to_URI(ref, file_locations) 

1898 

1899 return uris 

1900 

1901 def _locations_to_URI( 

1902 self, 

1903 ref: DatasetRef, 

1904 file_locations: Sequence[Tuple[Location, StoredFileInfo]], 

1905 ) -> DatasetRefURIs: 

1906 """Convert one or more file locations associated with a DatasetRef 

1907 to a DatasetRefURIs. 

1908 

1909 Parameters 

1910 ---------- 

1911 ref : `DatasetRef` 

1912 Reference to the dataset. 

1913 file_locations : Sequence[Tuple[Location, StoredFileInfo]] 

1914 Each item in the sequence is the location of the dataset within the 

1915 datastore and stored information about the file and its formatter. 

1916 If there is only one item in the sequence then it is treated as the 

1917 primary URI. If there is more than one item then they are treated 

1918 as component URIs. If there are no items then an error is raised 

1919 unless ``self.trustGetRequest`` is `True`. 

1920 

1921 Returns 

1922 ------- 

1923 uris : `DatasetRefURIs` 

1924 Represents the primary URI or component URIs described by the 

1925 inputs. 

1926 

1927 Raises 

1928 ------ 

1929 RuntimeError 

1930 If no file locations are passed in and ``self.trustGetRequest`` is 

1931 `False`. 

1932 FileNotFoundError 

1933 If a passed-in URI does not exist, and ``self.trustGetRequest`` 

1934 is `False`. 

1935 RuntimeError 

1936 If a passed in `StoredFileInfo`'s ``component`` is `None` (this is 

1937 unexpected). 

1938 """ 

1939 

1940 guessing = False 

1941 uris = DatasetRefURIs() 

1942 

1943 if not file_locations: 

1944 if not self.trustGetRequest: 

1945 raise RuntimeError(f"Unexpectedly got no artifacts for dataset {ref}") 

1946 file_locations = self._get_expected_dataset_locations_info(ref) 

1947 guessing = True 

1948 

1949 if len(file_locations) == 1: 

1950 # No disassembly so this is the primary URI 

1951 uris.primaryURI = file_locations[0][0].uri 

1952 if guessing and not uris.primaryURI.exists(): 

1953 raise FileNotFoundError(f"Expected URI ({uris.primaryURI}) does not exist") 

1954 else: 

1955 for location, file_info in file_locations: 

1956 if file_info.component is None: 

1957 raise RuntimeError(f"Unexpectedly got no component name for a component at {location}") 

1958 if guessing and not location.uri.exists(): 

1959 # If we are trusting then it is entirely possible for 

1960 # some components to be missing. In that case we skip 

1961 # to the next component. 

1962 if self.trustGetRequest: 

1963 continue 

1964 raise FileNotFoundError(f"Expected URI ({location.uri}) does not exist") 

1965 uris.componentURIs[file_info.component] = location.uri 

1966 

1967 return uris 

1968 

1969 def retrieveArtifacts( 

1970 self, 

1971 refs: Iterable[DatasetRef], 

1972 destination: ResourcePath, 

1973 transfer: str = "auto", 

1974 preserve_path: bool = True, 

1975 overwrite: bool = False, 

1976 ) -> List[ResourcePath]: 

1977 """Retrieve the file artifacts associated with the supplied refs. 

1978 

1979 Parameters 

1980 ---------- 

1981 refs : iterable of `DatasetRef` 

1982 The datasets for which file artifacts are to be retrieved. 

1983 A single ref can result in multiple files. The refs must 

1984 be resolved. 

1985 destination : `lsst.resources.ResourcePath` 

1986 Location to write the file artifacts. 

1987 transfer : `str`, optional 

1988 Method to use to transfer the artifacts. Must be one of the options 

1989 supported by `lsst.resources.ResourcePath.transfer_from()`. 

1990 "move" is not allowed. 

1991 preserve_path : `bool`, optional 

1992 If `True` the full path of the file artifact within the datastore 

1993 is preserved. If `False` the final file component of the path 

1994 is used. 

1995 overwrite : `bool`, optional 

1996 If `True` allow transfers to overwrite existing files at the 

1997 destination. 

1998 

1999 Returns 

2000 ------- 

2001 targets : `list` of `lsst.resources.ResourcePath` 

2002 URIs of file artifacts in destination location. Order is not 

2003 preserved. 

2004 """ 

2005 if not destination.isdir(): 

2006 raise ValueError(f"Destination location must refer to a directory. Given {destination}") 

2007 

2008 if transfer == "move": 

2009 raise ValueError("Can not move artifacts out of datastore. Use copy instead.") 

2010 

2011 # Source -> Destination 

2012 # This also helps filter out duplicate DatasetRef in the request 

2013 # that will map to the same underlying file transfer. 

2014 to_transfer: Dict[ResourcePath, ResourcePath] = {} 

2015 

2016 for ref in refs: 

2017 locations = self._get_dataset_locations_info(ref) 

2018 for location, _ in locations: 

2019 source_uri = location.uri 

2020 target_path: ResourcePathExpression 

2021 if preserve_path: 

2022 target_path = location.pathInStore 

2023 if target_path.isabs(): 

2024 # This is an absolute path to an external file. 

2025 # Use the full path. 

2026 target_path = target_path.relativeToPathRoot 

2027 else: 

2028 target_path = source_uri.basename() 

2029 target_uri = destination.join(target_path) 

2030 to_transfer[source_uri] = target_uri 

2031 

2032 # In theory can now parallelize the transfer 

2033 log.debug("Number of artifacts to transfer to %s: %d", str(destination), len(to_transfer)) 

2034 for source_uri, target_uri in to_transfer.items(): 

2035 target_uri.transfer_from(source_uri, transfer=transfer, overwrite=overwrite) 

2036 

2037 return list(to_transfer.values()) 

2038 
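
A sketch of copying the artifacts for a set of datasets into a local directory, assuming a configured datastore and resolved refs; the directory path is hypothetical. The destination must be a directory, and "move" is rejected by the method.

    from typing import Iterable, List

    from lsst.daf.butler import DatasetRef, Datastore
    from lsst.resources import ResourcePath

    def export_artifacts(
        datastore: Datastore, refs: Iterable[DatasetRef], directory: str
    ) -> List[ResourcePath]:
        # forceDirectory=True makes the destination an explicit directory URI.
        destination = ResourcePath(directory, forceDirectory=True)
        return datastore.retrieveArtifacts(
            refs, destination, transfer="copy", preserve_path=True, overwrite=False
        )
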

2039 def get( 

2040 self, 

2041 ref: DatasetRef, 

2042 parameters: Optional[Mapping[str, Any]] = None, 

2043 storageClass: Optional[Union[StorageClass, str]] = None, 

2044 ) -> Any: 

2045 """Load an InMemoryDataset from the store. 

2046 

2047 Parameters 

2048 ---------- 

2049 ref : `DatasetRef` 

2050 Reference to the required Dataset. 

2051 parameters : `dict` 

2052 `StorageClass`-specific parameters that specify, for example, 

2053 a slice of the dataset to be loaded. 

2054 storageClass : `StorageClass` or `str`, optional 

2055 The storage class to be used to override the Python type 

2056 returned by this method. By default the returned type matches 

2057 the dataset type definition for this dataset. Specifying a 

2058 read `StorageClass` can force a different type to be returned. 

2059 This type must be compatible with the original type. 

2060 

2061 Returns 

2062 ------- 

2063 inMemoryDataset : `object` 

2064 Requested dataset or slice thereof as an InMemoryDataset. 

2065 

2066 Raises 

2067 ------ 

2068 FileNotFoundError 

2069 Requested dataset can not be retrieved. 

2070 TypeError 

2071 Return value from formatter has unexpected type. 

2072 ValueError 

2073 Formatter failed to process the dataset. 

2074 """ 

2075 # Supplied storage class for the component being read is either 

2076 # from the ref itself or from an override if we want to force 

2077 # type conversion. 

2078 if storageClass is not None: 

2079 ref = ref.overrideStorageClass(storageClass) 

2080 refStorageClass = ref.datasetType.storageClass 

2081 

2082 allGetInfo = self._prepare_for_get(ref, parameters) 

2083 refComponent = ref.datasetType.component() 

2084 

2085 # Create mapping from component name to related info 

2086 allComponents = {i.component: i for i in allGetInfo} 

2087 

2088 # By definition the dataset is disassembled if we have more 

2089 # than one record for it. 

2090 isDisassembled = len(allGetInfo) > 1 

2091 

2092 # Look for the special case where we are disassembled but the 

2093 # component is a derived component that was not written during 

2094 # disassembly. For this scenario we need to check that the 

2095 # component requested is listed as a derived component for the 

2096 # composite storage class 

2097 isDisassembledReadOnlyComponent = False 

2098 if isDisassembled and refComponent: 

2099 # The composite storage class should be accessible through 

2100 # the component dataset type 

2101 compositeStorageClass = ref.datasetType.parentStorageClass 

2102 

2103 # In the unlikely scenario where the composite storage 

2104 # class is not known, we can only assume that this is a 

2105 # normal component. If that assumption is wrong then the 

2106 # branch below that reads a persisted component will fail 

2107 # so there is no need to complain here. 

2108 if compositeStorageClass is not None: 

2109 isDisassembledReadOnlyComponent = refComponent in compositeStorageClass.derivedComponents 

2110 

2111 if isDisassembled and not refComponent: 

2112 # This was a disassembled dataset spread over multiple files 

2113 # and we need to put them all back together again. 

2114 # Read into memory and then assemble 

2115 

2116 # Check that the supplied parameters are suitable for the type read 

2117 refStorageClass.validateParameters(parameters) 

2118 

2119 # We want to keep track of all the parameters that were not used 

2120 # by formatters. We assume that if any of the component formatters 

2121 # use a parameter that we do not need to apply it again in the 

2122 # assembler. 

2123 usedParams = set() 

2124 

2125 components: Dict[str, Any] = {} 

2126 for getInfo in allGetInfo: 

2127 # assemblerParams are parameters not understood by the 

2128 # associated formatter. 

2129 usedParams.update(set(getInfo.formatterParams)) 

2130 

2131 component = getInfo.component 

2132 

2133 if component is None: 

2134 raise RuntimeError(f"Internal error in datastore assembly of {ref}") 

2135 

2136 # We do not want the formatter to think it's reading 

2137 # a component though because it is really reading a 

2138 # standalone dataset -- always tell the reader it is not a 

2139 # component. 

2140 components[component] = self._read_artifact_into_memory( 

2141 getInfo, ref.makeComponentRef(component), isComponent=False 

2142 ) 

2143 

2144 inMemoryDataset = ref.datasetType.storageClass.delegate().assemble(components) 

2145 

2146 # Any unused parameters will have to be passed to the assembler 

2147 if parameters: 

2148 unusedParams = {k: v for k, v in parameters.items() if k not in usedParams} 

2149 else: 

2150 unusedParams = {} 

2151 

2152 # Process parameters 

2153 return ref.datasetType.storageClass.delegate().handleParameters( 

2154 inMemoryDataset, parameters=unusedParams 

2155 ) 

2156 

2157 elif isDisassembledReadOnlyComponent: 

2158 compositeStorageClass = ref.datasetType.parentStorageClass 

2159 if compositeStorageClass is None: 

2160 raise RuntimeError( 

2161 f"Unable to retrieve derived component '{refComponent}' since" 

2162 "no composite storage class is available." 

2163 ) 

2164 

2165 if refComponent is None: 

2166 # Mainly for mypy 

2167 raise RuntimeError(f"Internal error in datastore {self.name}: component can not be None here") 

2168 

2169 # Assume that every derived component can be calculated by 

2170 # forwarding the request to a single read/write component. 

2171 # Rather than guessing which rw component is the right one by 

2172 # scanning each for a derived component of the same name, 

2173 # we ask the storage class delegate directly which one is best to 

2174 # use. 

2175 compositeDelegate = compositeStorageClass.delegate() 

2176 forwardedComponent = compositeDelegate.selectResponsibleComponent( 

2177 refComponent, set(allComponents) 

2178 ) 

2179 

2180 # Select the relevant component 

2181 rwInfo = allComponents[forwardedComponent] 

2182 

2183 # For now assume that read parameters are validated against 

2184 # the real component and not the requested component 

2185 forwardedStorageClass = rwInfo.formatter.fileDescriptor.readStorageClass 

2186 forwardedStorageClass.validateParameters(parameters) 

2187 

2188 # The reference to use for the caching must refer to the forwarded 

2189 # component and not the derived component. 

2190 cache_ref = ref.makeCompositeRef().makeComponentRef(forwardedComponent) 

2191 

2192 # Unfortunately the FileDescriptor inside the formatter will have 

2193 # the wrong write storage class so we need to create a new one 

2194 # given the immutability constraint. 

2195 writeStorageClass = rwInfo.info.storageClass 

2196 

2197 # We may need to put some thought into parameters for read 

2198 # components but for now forward them on as is 

2199 readFormatter = type(rwInfo.formatter)( 

2200 FileDescriptor( 

2201 rwInfo.location, 

2202 readStorageClass=refStorageClass, 

2203 storageClass=writeStorageClass, 

2204 parameters=parameters, 

2205 ), 

2206 ref.dataId, 

2207 ) 

2208 

2209 # The assembler can not receive any parameter requests for a 

2210 # derived component at this time since the assembler will 

2211 # see the storage class of the derived component and those 

2212 # parameters will have to be handled by the formatter on the 

2213 # forwarded storage class. 

2214 assemblerParams: Dict[str, Any] = {} 

2215 

2216 # Need to create a new info that specifies the derived 

2217 # component and associated storage class 

2218 readInfo = DatastoreFileGetInformation( 

2219 rwInfo.location, 

2220 readFormatter, 

2221 rwInfo.info, 

2222 assemblerParams, 

2223 {}, 

2224 refComponent, 

2225 refStorageClass, 

2226 ) 

2227 

2228 return self._read_artifact_into_memory(readInfo, ref, isComponent=True, cache_ref=cache_ref) 

2229 

2230 else: 

2231 # Single file request or component from that composite file 

2232 for lookup in (refComponent, None): 

2233 if lookup in allComponents: 

2234 getInfo = allComponents[lookup] 

2235 break 

2236 else: 

2237 raise FileNotFoundError( 

2238 f"Component {refComponent} not found for ref {ref} in datastore {self.name}" 

2239 ) 

2240 

2241 # Do not need the component itself if already disassembled 

2242 if isDisassembled: 

2243 isComponent = False 

2244 else: 

2245 isComponent = getInfo.component is not None 

2246 

2247 # For a component read of a composite we want the cache to 

2248 # be looking at the composite ref itself. 

2249 cache_ref = ref.makeCompositeRef() if isComponent else ref 

2250 

2251 # For a disassembled component we can validate parameters against 

2252 # the component storage class directly 

2253 if isDisassembled: 

2254 refStorageClass.validateParameters(parameters) 

2255 else: 

2256 # For an assembled composite this could be a derived 

2257 # component derived from a real component. The validity 

2258 # of the parameters is not clear. For now validate against 

2259 # the composite storage class 

2260 getInfo.formatter.fileDescriptor.storageClass.validateParameters(parameters) 

2261 

2262 return self._read_artifact_into_memory(getInfo, ref, isComponent=isComponent, cache_ref=cache_ref) 

2263 
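
A thin wrapper sketch around get(), assuming a configured datastore and a resolved ref (hypothetical names). Any parameters must be valid for the ref's storage class and any storage class override must be compatible with the original; both are passed through unchanged here.

    from typing import Any, Mapping, Optional

    from lsst.daf.butler import DatasetRef, Datastore

    def load_dataset(
        datastore: Datastore,
        ref: DatasetRef,
        parameters: Optional[Mapping[str, Any]] = None,
        storage_class: Optional[str] = None,
    ) -> Any:
        # parameters select e.g. a slice of the dataset; storage_class forces
        # a different (but compatible) Python type to be returned.
        return datastore.get(ref, parameters=parameters, storageClass=storage_class)
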

2264 @transactional 

2265 def put(self, inMemoryDataset: Any, ref: DatasetRef) -> None: 

2266 """Write a InMemoryDataset with a given `DatasetRef` to the store. 

2267 

2268 Parameters 

2269 ---------- 

2270 inMemoryDataset : `object` 

2271 The dataset to store. 

2272 ref : `DatasetRef` 

2273 Reference to the associated Dataset. 

2274 

2275 Raises 

2276 ------ 

2277 TypeError 

2278 Supplied object and storage class are inconsistent. 

2279 DatasetTypeNotSupportedError 

2280 The associated `DatasetType` is not handled by this datastore. 

2281 

2282 Notes 

2283 ----- 

2284 If the datastore is configured to reject certain dataset types it 

2285 is possible that the put will fail and raise a 

2286 `DatasetTypeNotSupportedError`. The main use case for this is to 

2287 allow `ChainedDatastore` to put to multiple datastores without 

2288 requiring that every datastore accepts the dataset. 

2289 """ 

2290 

2291 doDisassembly = self.composites.shouldBeDisassembled(ref) 

2292 # doDisassembly = True 

2293 

2294 artifacts = [] 

2295 if doDisassembly: 

2296 components = ref.datasetType.storageClass.delegate().disassemble(inMemoryDataset) 

2297 if components is None: 

2298 raise RuntimeError( 

2299 f"Inconsistent configuration: dataset type {ref.datasetType.name} " 

2300 f"with storage class {ref.datasetType.storageClass.name} " 

2301 "is configured to be disassembled, but cannot be." 

2302 ) 

2303 for component, componentInfo in components.items(): 

2304 # Don't recurse because we want to take advantage of 

2305 # bulk insert -- need a new DatasetRef that refers to the 

2306 # same dataset_id but has the component DatasetType 

2307 # DatasetType does not refer to the types of components 

2308 # So we construct one ourselves. 

2309 compRef = ref.makeComponentRef(component) 

2310 storedInfo = self._write_in_memory_to_artifact(componentInfo.component, compRef) 

2311 artifacts.append((compRef, storedInfo)) 

2312 else: 

2313 # Write the entire thing out 

2314 storedInfo = self._write_in_memory_to_artifact(inMemoryDataset, ref) 

2315 artifacts.append((ref, storedInfo)) 

2316 

2317 self._register_datasets(artifacts) 

2318 
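
A sketch of a tolerant put, assuming a configured datastore and a resolved ref. As the Notes above describe, a datastore configured to reject certain dataset types raises DatasetTypeNotSupportedError, which a caller such as a chained datastore may want to treat as non-fatal.

    from typing import Any

    from lsst.daf.butler import DatasetRef, Datastore, DatasetTypeNotSupportedError

    def store_dataset(datastore: Datastore, in_memory_dataset: Any, ref: DatasetRef) -> bool:
        # Return False instead of propagating a rejection by the constraints.
        try:
            datastore.put(in_memory_dataset, ref)
        except DatasetTypeNotSupportedError:
            return False
        return True
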

2319 @transactional 

2320 def trash(self, ref: Union[DatasetRef, Iterable[DatasetRef]], ignore_errors: bool = True) -> None: 

2321 # At this point can safely remove these datasets from the cache 

2322 # to avoid confusion later on. If they are not trashed later 

2323 # the cache will simply be refilled. 

2324 self.cacheManager.remove_from_cache(ref) 

2325 

2326 # If we are in trust mode there will be nothing to move to 

2327 # the trash table and we will have to try to delete the file 

2328 # immediately. 

2329 if self.trustGetRequest: 

2330 # Try to keep the logic below for a single file trash. 

2331 if isinstance(ref, DatasetRef): 

2332 refs = {ref} 

2333 else: 

2334 # Will recreate ref at the end of this branch. 

2335 refs = set(ref) 

2336 

2337 # Determine which datasets are known to datastore directly. 

2338 id_to_ref = {ref.id: ref for ref in refs} 

2339 existing_ids = self._get_stored_records_associated_with_refs(refs) 

2340 existing_refs = {id_to_ref[ref_id] for ref_id in existing_ids} 

2341 

2342 missing = refs - existing_refs 

2343 if missing: 

2344 # Do an explicit existence check on these refs. 

2345 # We only care about the artifacts at this point and not 

2346 # the dataset existence. 

2347 artifact_existence: Dict[ResourcePath, bool] = {} 

2348 _ = self.mexists(missing, artifact_existence) 

2349 uris = [uri for uri, exists in artifact_existence.items() if exists] 

2350 

2351 # FUTURE UPGRADE: Implement a parallelized bulk remove. 

2352 log.debug("Removing %d artifacts from datastore that are unknown to datastore", len(uris)) 

2353 for uri in uris: 

2354 try: 

2355 uri.remove() 

2356 except Exception as e: 

2357 if ignore_errors: 

2358 log.debug("Artifact %s could not be removed: %s", uri, e) 

2359 continue 

2360 raise 

2361 

2362 # There is no point asking the code below to remove refs we 

2363 # know are missing so update it with the list of existing 

2364 # records. Try to retain one vs many logic. 

2365 if not existing_refs: 

2366 # Nothing more to do since none of the datasets were 

2367 # known to the datastore record table. 

2368 return 

2369 ref = list(existing_refs) 

2370 if len(ref) == 1: 

2371 ref = ref[0] 

2372 

2373 # Get file metadata and internal metadata 

2374 if not isinstance(ref, DatasetRef): 

2375 log.debug("Doing multi-dataset trash in datastore %s", self.name) 

2376 # Assumed to be an iterable of refs so bulk mode enabled. 

2377 try: 

2378 self.bridge.moveToTrash(ref, transaction=self._transaction) 

2379 except Exception as e: 

2380 if ignore_errors: 

2381 log.warning("Unexpected issue moving multiple datasets to trash: %s", e) 

2382 else: 

2383 raise 

2384 return 

2385 

2386 log.debug("Trashing dataset %s in datastore %s", ref, self.name) 

2387 

2388 fileLocations = self._get_dataset_locations_info(ref) 

2389 

2390 if not fileLocations: 

2391 err_msg = f"Requested dataset to trash ({ref}) is not known to datastore {self.name}" 

2392 if ignore_errors: 

2393 log.warning(err_msg) 

2394 return 

2395 else: 

2396 raise FileNotFoundError(err_msg) 

2397 

2398 for location, storedFileInfo in fileLocations: 

2399 if not self._artifact_exists(location): 

2400 err_msg = ( 

2401 f"Dataset is known to datastore {self.name} but " 

2402 f"associated artifact ({location.uri}) is missing" 

2403 ) 

2404 if ignore_errors: 

2405 log.warning(err_msg) 

2406 return 

2407 else: 

2408 raise FileNotFoundError(err_msg) 

2409 

2410 # Mark dataset as trashed 

2411 try: 

2412 self.bridge.moveToTrash([ref], transaction=self._transaction) 

2413 except Exception as e: 

2414 if ignore_errors: 

2415 log.warning( 

2416 "Attempted to mark dataset (%s) to be trashed in datastore %s " 

2417 "but encountered an error: %s", 

2418 ref, 

2419 self.name, 

2420 e, 

2421 ) 

2422 pass 

2423 else: 

2424 raise 

2425 

2426 @transactional 

2427 def emptyTrash(self, ignore_errors: bool = True) -> None: 

2428 """Remove all datasets from the trash. 

2429 

2430 Parameters 

2431 ---------- 

2432 ignore_errors : `bool` 

2433 If `True` return without error even if something went wrong. 

2434 Problems could occur if another process is simultaneously trying 

2435 to delete. 

2436 """ 

2437 log.debug("Emptying trash in datastore %s", self.name) 

2438 

2439 # Context manager will empty trash iff we finish it without raising. 

2440 # It will also automatically delete the relevant rows from the 

2441 # trash table and the records table. 

2442 with self.bridge.emptyTrash( 

2443 self._table, record_class=StoredFileInfo, record_column="path" 

2444 ) as trash_data: 

2445 # Removing the artifacts themselves requires that the files are 

2446 # not also associated with refs that are not to be trashed. 

2447 # Therefore need to do a query with the file paths themselves 

2448 # and return all the refs associated with them. Can only delete 

2449 # a file if the refs to be trashed are the only refs associated 

2450 # with the file. 

2451 # This requires multiple copies of the trashed items 

2452 trashed, artifacts_to_keep = trash_data 

2453 

2454 if artifacts_to_keep is None: 

2455 # The bridge is not helping us so have to work it out 

2456 # ourselves. This is not going to be as efficient. 

2457 trashed = list(trashed) 

2458 

2459 # The instance check is for mypy since up to this point it 

2460 # does not know the type of info. 

2461 path_map = self._refs_associated_with_artifacts( 

2462 [info.path for _, info in trashed if isinstance(info, StoredFileInfo)] 

2463 ) 

2464 

2465 for ref, info in trashed: 

2466 # Mypy needs to know this is not the base class 

2467 assert isinstance(info, StoredFileInfo), f"Unexpectedly got info of class {type(info)}" 

2468 

2469 path_map[info.path].remove(ref.id) 

2470 if not path_map[info.path]: 

2471 del path_map[info.path] 

2472 

2473 artifacts_to_keep = set(path_map) 

2474 

2475 for ref, info in trashed: 

2476 # Should not happen for this implementation but need 

2477 # to keep mypy happy. 

2478 assert info is not None, f"Internal logic error in emptyTrash with ref {ref}." 

2479 

2480 # Mypy needs to know this is not the base class 

2481 assert isinstance(info, StoredFileInfo), f"Unexpectedly got info of class {type(info)}" 

2482 

2483 if info.path in artifacts_to_keep: 

2484 # This is a multi-dataset artifact and we are not 

2485 # removing all associated refs. 

2486 continue 

2487 

2488 # Only trashed refs still known to datastore will be returned. 

2489 location = info.file_location(self.locationFactory) 

2490 

2491 # Point of no return for this artifact 

2492 log.debug("Removing artifact %s from datastore %s", location.uri, self.name) 

2493 try: 

2494 self._delete_artifact(location) 

2495 except FileNotFoundError: 

2496 # If the file itself has been deleted there is nothing 

2497 # we can do about it. It is possible that trash has 

2498 # been run in parallel in another process or someone 

2499 # decided to delete the file. It is unlikely to come 

2500 # back and so we should still continue with the removal 

2501 # of the entry from the trash table. It is also possible 

2502 # we removed it in a previous iteration if it was 

2503 # a multi-dataset artifact. The delete artifact method 

2504 # will log a debug message in this scenario. 

2505 # Distinguishing file missing before trash started and 

2506 # file already removed previously as part of this trash 

2507 # is not worth the distinction with regards to potential 

2508 # memory cost. 

2509 pass 

2510 except Exception as e: 

2511 if ignore_errors: 

2512 # Use a debug message here even though it's not 

2513 # a good situation. In some cases this can be 

2514 # caused by a race between user A and user B 

2515 # and neither of them has permissions for the 

2516 # other's files. Butler does not know about users 

2517 # and trash has no idea what collections these 

2518 # files were in (without guessing from a path). 

2519 log.debug( 

2520 "Encountered error removing artifact %s from datastore %s: %s", 

2521 location.uri, 

2522 self.name, 

2523 e, 

2524 ) 

2525 else: 

2526 raise 

2527 
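
A sketch of the two-phase removal implemented by trash() and emptyTrash(), assuming a configured datastore and resolved refs (hypothetical names). Artifacts shared with datasets that are not being trashed are preserved, as the implementation above explains.

    from typing import Iterable

    from lsst.daf.butler import DatasetRef, Datastore

    def purge_datasets(datastore: Datastore, refs: Iterable[DatasetRef]) -> None:
        # Phase 1: mark the datasets as trashed (records move to the trash table).
        datastore.trash(refs, ignore_errors=False)
        # Phase 2: delete the underlying artifacts and clean up the records.
        datastore.emptyTrash(ignore_errors=True)
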

2528 @transactional 

2529 def transfer_from( 

2530 self, 

2531 source_datastore: Datastore, 

2532 refs: Iterable[DatasetRef], 

2533 transfer: str = "auto", 

2534 artifact_existence: Optional[Dict[ResourcePath, bool]] = None, 

2535 ) -> tuple[set[DatasetRef], set[DatasetRef]]: 

2536 # Docstring inherited 

2537 if type(self) is not type(source_datastore): 

2538 raise TypeError( 

2539 f"Datastore mismatch between this datastore ({type(self)}) and the " 

2540 f"source datastore ({type(source_datastore)})." 

2541 ) 

2542 

2543 # Be explicit for mypy 

2544 if not isinstance(source_datastore, FileDatastore): 

2545 raise TypeError( 

2546 "Can only transfer to a FileDatastore from another FileDatastore, not" 

2547 f" {type(source_datastore)}" 

2548 ) 

2549 

2550 # Stop early if "direct" transfer mode is requested. That would 

2551 # require that the URI inside the source datastore should be stored 

2552 # directly in the target datastore, which seems unlikely to be useful 

2553 # since at any moment the source datastore could delete the file. 

2554 if transfer in ("direct", "split"): 

2555 raise ValueError( 

2556 f"Can not transfer from a source datastore using {transfer} mode since" 

2557 " those files are controlled by the other datastore." 

2558 ) 

2559 

2560 # Empty existence lookup if none given. 

2561 if artifact_existence is None: 

2562 artifact_existence = {} 

2563 

2564 # We will go through the list multiple times so must convert 

2565 # generators to lists. 

2566 refs = list(refs) 

2567 

2568 # In order to handle disassembled composites the code works 

2569 # at the records level since it can assume that internal APIs 

2570 # can be used. 

2571 # - If the record already exists in the destination this is assumed 

2572 # to be okay. 

2573 # - If there is no record but the source and destination URIs are 

2574 # identical no transfer is done but the record is added. 

2575 # - If the source record refers to an absolute URI currently assume 

2576 # that that URI should remain absolute and will be visible to the 

2577 # destination butler. May need to have a flag to indicate whether 

2578 # the dataset should be transferred. This will only happen if 

2579 # the detached Butler has had a local ingest. 

2580 

2581 # What we really want is all the records in the source datastore 

2582 # associated with these refs. Or derived ones if they don't exist 

2583 # in the source. 

2584 source_records = source_datastore._get_stored_records_associated_with_refs(refs) 

2585 

2586 # The source dataset_ids are the keys in these records 

2587 source_ids = set(source_records) 

2588 log.debug("Number of datastore records found in source: %d", len(source_ids)) 

2589 

2590 requested_ids = set(ref.id for ref in refs) 

2591 missing_ids = requested_ids - source_ids 

2592 

2593 # Missing IDs can be okay if that datastore has allowed 

2594 # gets based on file existence. Should we transfer what we can 

2595 # or complain about it and warn? 

2596 if missing_ids and not source_datastore.trustGetRequest: 

2597 raise ValueError( 

2598 f"Some datasets are missing from source datastore {source_datastore}: {missing_ids}" 

2599 ) 

2600 

2601 # Need to map these missing IDs to a DatasetRef so we can guess 

2602 # the details. 

2603 if missing_ids: 

2604 log.info( 

2605 "Number of expected datasets missing from source datastore records: %d out of %d", 

2606 len(missing_ids), 

2607 len(requested_ids), 

2608 ) 

2609 id_to_ref = {ref.id: ref for ref in refs if ref.id in missing_ids} 

2610 

2611 # This should be chunked in case we end up having to check 

2612 # the file store since we need some log output to show 

2613 # progress. 

2614 for missing_ids_chunk in chunk_iterable(missing_ids, chunk_size=10_000): 

2615 records = {} 

2616 for missing in missing_ids_chunk: 

2617 # Ask the source datastore where the missing artifacts 

2618 # should be. An execution butler might not know about the 

2619 # artifacts even if they are there. 

2620 expected = source_datastore._get_expected_dataset_locations_info(id_to_ref[missing]) 

2621 records[missing] = [info for _, info in expected] 

2622 

2623 # Call the mexist helper method in case we have not already 

2624 # checked these artifacts such that artifact_existence is 

2625 # empty. This allows us to benefit from parallelism. 

2626 # datastore.mexists() itself does not give us access to the 

2627 # derived datastore record. 

2628 log.verbose("Checking existence of %d datasets unknown to datastore", len(records)) 

2629 ref_exists = source_datastore._process_mexists_records( 

2630 id_to_ref, records, False, artifact_existence=artifact_existence 

2631 ) 

2632 

2633 # Now go through the records and propagate the ones that exist. 

2634 location_factory = source_datastore.locationFactory 

2635 for missing, record_list in records.items(): 

2636 # Skip completely if the ref does not exist. 

2637 ref = id_to_ref[missing] 

2638 if not ref_exists[ref]: 

2639 log.warning("Asked to transfer dataset %s but no file artifacts exist for it.", ref) 

2640 continue 

2641 # Check for file artifact to decide which parts of a 

2642 # disassembled composite do exist. If there is only a 

2643 # single record we don't even need to look because it can't 

2644 # be a composite and must exist. 

2645 if len(record_list) == 1: 

2646 dataset_records = record_list 

2647 else: 

2648 dataset_records = [ 

2649 record 

2650 for record in record_list 

2651 if artifact_existence[record.file_location(location_factory).uri] 

2652 ] 

2653 assert len(dataset_records) > 0, "Disassembled composite should have had some files." 

2654 

2655 # Rely on source_records being a defaultdict. 

2656 source_records[missing].extend(dataset_records) 

2657 

2658 # See if we already have these records 

2659 target_records = self._get_stored_records_associated_with_refs(refs) 

2660 

2661 # The artifacts to register 

2662 artifacts = [] 

2663 

2664 # Refs that already exist 

2665 already_present = [] 

2666 

2667 # Refs that were rejected by this datastore. 

2668 rejected = set() 

2669 

2670 # Refs that were transferred successfully. 

2671 accepted = set() 

2672 

2673 # Record each time we have done a "direct" transfer. 

2674 direct_transfers = [] 

2675 

2676 # Now can transfer the artifacts 

2677 for ref in refs: 

2678 if not self.constraints.isAcceptable(ref): 

2679 # This datastore should not be accepting this dataset. 

2680 rejected.add(ref) 

2681 continue 

2682 

2683 accepted.add(ref) 

2684 

2685 if ref.id in target_records: 

2686 # Already have an artifact for this. 

2687 already_present.append(ref) 

2688 continue 

2689 

2690 # mypy needs to know these are always resolved refs 

2691 for info in source_records[ref.id]: 

2692 source_location = info.file_location(source_datastore.locationFactory) 

2693 target_location = info.file_location(self.locationFactory) 

2694 if source_location == target_location and not source_location.pathInStore.isabs(): 

2695 # Artifact is already in the target location. 

2696 # (which is how execution butler currently runs) 

2697 pass 

2698 else: 

2699 if target_location.pathInStore.isabs(): 

2700 # Just because we can see the artifact when running 

2701 # the transfer doesn't mean it will be generally 

2702 # accessible to a user of this butler. Need to decide 

2703 # what to do about an absolute path. 

2704 if transfer == "auto": 

2705 # For "auto" transfers we allow the absolute URI 

2706 # to be recorded in the target datastore. 

2707 direct_transfers.append(source_location) 

2708 else: 

2709 # The user is explicitly requesting a transfer 

2710 # even for an absolute URI. This requires us to 

2711 # calculate the target path. 

2712 template_ref = ref 

2713 if info.component: 

2714 template_ref = ref.makeComponentRef(info.component) 

2715 target_location = self._calculate_ingested_datastore_name( 

2716 source_location.uri, 

2717 template_ref, 

2718 ) 

2719 

2720 info = info.update(path=target_location.pathInStore.path) 

2721 

2722 # Need to transfer it to the new location. 

2723 # Assume we should always overwrite. If the artifact 

2724 # is there this might indicate that a previous transfer 

2725 # was interrupted but was not able to be rolled back 

2726 # completely (eg pre-emption) so follow Datastore default 

2727 # and overwrite. 

2728 target_location.uri.transfer_from( 

2729 source_location.uri, transfer=transfer, overwrite=True, transaction=self._transaction 

2730 ) 

2731 

2732 artifacts.append((ref, info)) 

2733 

2734 if direct_transfers: 

2735 log.info( 

2736 "Transfer request for an outside-datastore artifact with absolute URI done %d time%s", 

2737 len(direct_transfers), 

2738 "" if len(direct_transfers) == 1 else "s", 

2739 ) 

2740 

2741 self._register_datasets(artifacts) 

2742 

2743 if already_present: 

2744 n_skipped = len(already_present) 

2745 log.info( 

2746 "Skipped transfer of %d dataset%s already present in datastore", 

2747 n_skipped, 

2748 "" if n_skipped == 1 else "s", 

2749 ) 

2750 

2751 return accepted, rejected 

2752 
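
A sketch of copying datasets between two file datastores, assuming both are FileDatastore instances and the refs are resolved (hypothetical names). "direct" and "split" modes are rejected by transfer_from(), and the shared artifact_existence cache can be reused across calls.

    from typing import Dict, Iterable

    from lsst.daf.butler import DatasetRef, Datastore
    from lsst.resources import ResourcePath

    def copy_between_datastores(
        target: Datastore, source: Datastore, refs: Iterable[DatasetRef]
    ) -> None:
        artifact_existence: Dict[ResourcePath, bool] = {}
        accepted, rejected = target.transfer_from(
            source, refs, transfer="copy", artifact_existence=artifact_existence
        )
        if rejected:
            raise RuntimeError(f"Target datastore rejected {len(rejected)} dataset(s).")
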

2753 @transactional 

2754 def forget(self, refs: Iterable[DatasetRef]) -> None: 

2755 # Docstring inherited. 

2756 refs = list(refs) 

2757 self.bridge.forget(refs) 

2758 self._table.delete(["dataset_id"], *[{"dataset_id": ref.id} for ref in refs]) 

2759 

2760 def validateConfiguration( 

2761 self, entities: Iterable[Union[DatasetRef, DatasetType, StorageClass]], logFailures: bool = False 

2762 ) -> None: 

2763 """Validate some of the configuration for this datastore. 

2764 

2765 Parameters 

2766 ---------- 

2767 entities : iterable of `DatasetRef`, `DatasetType`, or `StorageClass` 

2768 Entities to test against this configuration. Can be differing 

2769 types. 

2770 logFailures : `bool`, optional 

2771 If `True`, output a log message for every validation error 

2772 detected. 

2773 

2774 Raises 

2775 ------ 

2776 DatastoreValidationError 

2777 Raised if there is a validation problem with a configuration. 

2778 All the problems are reported in a single exception. 

2779 

2780 Notes 

2781 ----- 

2782 This method checks that all the supplied entities have valid file 

2783 templates and also have formatters defined. 

2784 """ 

2785 

2786 templateFailed = None 

2787 try: 

2788 self.templates.validateTemplates(entities, logFailures=logFailures) 

2789 except FileTemplateValidationError as e: 

2790 templateFailed = str(e) 

2791 

2792 formatterFailed = [] 

2793 for entity in entities: 

2794 try: 

2795 self.formatterFactory.getFormatterClass(entity) 

2796 except KeyError as e: 

2797 formatterFailed.append(str(e)) 

2798 if logFailures: 

2799 log.critical("Formatter failure: %s", e) 

2800 

2801 if templateFailed or formatterFailed: 

2802 messages = [] 

2803 if templateFailed: 

2804 messages.append(templateFailed) 

2805 if formatterFailed: 

2806 messages.append(",".join(formatterFailed)) 

2807 msg = ";\n".join(messages) 

2808 raise DatastoreValidationError(msg) 

2809 

2810 def getLookupKeys(self) -> Set[LookupKey]: 

2811 # Docstring is inherited from base class 

2812 return ( 

2813 self.templates.getLookupKeys() 

2814 | self.formatterFactory.getLookupKeys() 

2815 | self.constraints.getLookupKeys() 

2816 ) 

2817 

2818 def validateKey(self, lookupKey: LookupKey, entity: Union[DatasetRef, DatasetType, StorageClass]) -> None: 

2819 # Docstring is inherited from base class 

2820 # The key can be valid in either formatters or templates so we can 

2821 # only check the template if it exists 

2822 if lookupKey in self.templates: 

2823 try: 

2824 self.templates[lookupKey].validateTemplate(entity) 

2825 except FileTemplateValidationError as e: 

2826 raise DatastoreValidationError(e) from e 

2827 

2828 def export( 

2829 self, 

2830 refs: Iterable[DatasetRef], 

2831 *, 

2832 directory: Optional[ResourcePathExpression] = None, 

2833 transfer: Optional[str] = "auto", 

2834 ) -> Iterable[FileDataset]: 

2835 # Docstring inherited from Datastore.export. 

2836 if transfer == "auto" and directory is None: 

2837 transfer = None 

2838 

2839 if transfer is not None and directory is None: 

2840 raise TypeError(f"Cannot export using transfer mode {transfer} with no export directory given") 

2841 

2842 if transfer == "move": 

2843 raise TypeError("Can not export by moving files out of datastore.") 

2844 elif transfer == "direct": 

2845 # For an export, treat this as equivalent to None. We do not 

2846 # want an import to risk using absolute URIs to datasets owned 

2847 # by another datastore. 

2848 log.info("Treating 'direct' transfer mode as in-place export.") 

2849 transfer = None 

2850 

2851 # Force the directory to be a URI object 

2852 directoryUri: Optional[ResourcePath] = None 

2853 if directory is not None: 

2854 directoryUri = ResourcePath(directory, forceDirectory=True) 

2855 

2856 if transfer is not None and directoryUri is not None: 

2857 # mypy needs the second test 

2858 if not directoryUri.exists(): 

2859 raise FileNotFoundError(f"Export location {directory} does not exist") 

2860 

2861 progress = Progress("lsst.daf.butler.datastores.FileDatastore.export", level=logging.DEBUG) 

2862 for ref in progress.wrap(refs, "Exporting dataset files"): 

2863 fileLocations = self._get_dataset_locations_info(ref) 

2864 if not fileLocations: 

2865 raise FileNotFoundError(f"Could not retrieve dataset {ref}.") 

2866 # For now we can not export disassembled datasets 

2867 if len(fileLocations) > 1: 

2868 raise NotImplementedError(f"Can not export disassembled datasets such as {ref}") 

2869 location, storedFileInfo = fileLocations[0] 

2870 

2871 pathInStore = location.pathInStore.path 

2872 if transfer is None: 

2873 # TODO: do we also need to return the readStorageClass somehow? 

2874 # We will use the path in store directly. If this is an 

2875 # absolute URI, preserve it. 

2876 if location.pathInStore.isabs(): 

2877 pathInStore = str(location.uri) 

2878 elif transfer == "direct": 

2879 # Use full URIs to the remote store in the export 

2880 pathInStore = str(location.uri) 

2881 else: 

2882 # mypy needs help 

2883 assert directoryUri is not None, "directoryUri must be defined to get here" 

2884 storeUri = ResourcePath(location.uri) 

2885 

2886 # if the datastore has an absolute URI to a resource, we 

2887 # have two options: 

2888 # 1. Keep the absolute URI in the exported YAML 

2889 # 2. Allocate a new name in the local datastore and transfer 

2890 # it. 

2891 # For now go with option 2 

2892 if location.pathInStore.isabs(): 

2893 template = self.templates.getTemplate(ref) 

2894 newURI = ResourcePath(template.format(ref), forceAbsolute=False) 

2895 pathInStore = str(newURI.updatedExtension(location.pathInStore.getExtension())) 

2896 

2897 exportUri = directoryUri.join(pathInStore) 

2898 exportUri.transfer_from(storeUri, transfer=transfer) 

2899 

2900 yield FileDataset(refs=[ref], path=pathInStore, formatter=storedFileInfo.formatter) 

2901 

2902 @staticmethod 

2903 def computeChecksum( 

2904 uri: ResourcePath, algorithm: str = "blake2b", block_size: int = 8192 

2905 ) -> Optional[str]: 

2906 """Compute the checksum of the supplied file. 

2907 

2908 Parameters 

2909 ---------- 

2910 uri : `lsst.resources.ResourcePath` 

2911 Name of resource to calculate checksum from. 

2912 algorithm : `str`, optional 

2913 Name of algorithm to use. Must be one of the algorithms supported 

2914 by :py:mod:`hashlib`. 

2915 block_size : `int` 

2916 Number of bytes to read from file at one time. 

2917 

2918 Returns 

2919 ------- 

2920 hexdigest : `str` 

2921 Hex digest of the file. 

2922 

2923 Notes 

2924 ----- 

2925 Currently returns None if the URI is for a remote resource. 

2926 """ 

2927 if algorithm not in hashlib.algorithms_guaranteed: 

2928 raise NameError("The specified algorithm '{}' is not supported by hashlib".format(algorithm)) 

2929 

2930 if not uri.isLocal: 

2931 return None 

2932 

2933 hasher = hashlib.new(algorithm) 

2934 

2935 with uri.as_local() as local_uri: 

2936 with open(local_uri.ospath, "rb") as f: 

2937 for chunk in iter(lambda: f.read(block_size), b""): 

2938 hasher.update(chunk) 

2939 

2940 return hasher.hexdigest() 

2941 
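
A brief call sketch for the static checksum helper above; the file path is hypothetical. Remote (non-local) URIs return `None` rather than a digest.

    from lsst.daf.butler.datastores.fileDatastore import FileDatastore
    from lsst.resources import ResourcePath

    # blake2b hex digest of a local file, or None if the URI is not local.
    digest = FileDatastore.computeChecksum(ResourcePath("/tmp/example.fits"), algorithm="blake2b")
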

2942 def needs_expanded_data_ids( 

2943 self, 

2944 transfer: Optional[str], 

2945 entity: Optional[Union[DatasetRef, DatasetType, StorageClass]] = None, 

2946 ) -> bool: 

2947 # Docstring inherited. 

2948 # This _could_ also use entity to inspect whether the filename template 

2949 # involves placeholders other than the required dimensions for its 

2950 # dataset type, but that's not necessary for correctness; it just 

2951 # enables more optimizations (perhaps only in theory). 

2952 return transfer not in ("direct", None) 

2953 

2954 def import_records(self, data: Mapping[str, DatastoreRecordData]) -> None: 

2955 # Docstring inherited from the base class. 

2956 record_data = data.get(self.name) 

2957 if not record_data: 

2958 return 

2959 

2960 self._bridge.insert(FakeDatasetRef(dataset_id) for dataset_id in record_data.records.keys()) 

2961 

2962 # TODO: Verify that there are no unexpected table names in the dict? 

2963 unpacked_records = [] 

2964 for dataset_data in record_data.records.values(): 

2965 records = dataset_data.get(self._table.name) 

2966 if records: 

2967 for info in records: 

2968 assert isinstance(info, StoredFileInfo), "Expecting StoredFileInfo records" 

2969 unpacked_records.append(info.to_record()) 

2970 if unpacked_records: 

2971 self._table.insert(*unpacked_records, transaction=self._transaction) 

2972 

2973 def export_records(self, refs: Iterable[DatasetIdRef]) -> Mapping[str, DatastoreRecordData]: 

2974 # Docstring inherited from the base class. 

2975 exported_refs = list(self._bridge.check(refs)) 

2976 ids = {ref.id for ref in exported_refs} 

2977 records: dict[DatasetId, dict[str, list[StoredDatastoreItemInfo]]] = {id: {} for id in ids} 

2978 for row in self._table.fetch(dataset_id=ids): 

2979 info: StoredDatastoreItemInfo = StoredFileInfo.from_record(row) 

2980 dataset_records = records.setdefault(info.dataset_id, {}) 

2981 dataset_records.setdefault(self._table.name, []).append(info) 

2982 

2983 record_data = DatastoreRecordData(records=records) 

2984 return {self.name: record_data} 

2985 

2986 def set_retrieve_dataset_type_method(self, method: Callable[[str], DatasetType | None] | None) -> None: 

2987 # Docstring inherited from the base class. 

2988 self._retrieve_dataset_method = method 

2989 

2990 def _cast_storage_class(self, ref: DatasetRef) -> DatasetRef: 

2991 """Update dataset reference to use the storage class from registry. 

2992 

2993 This does nothing for regular datastores, and is only enabled for 

2994 trusted mode where we need to use registry definition of storage class 

2995 for some datastore methods. `set_retrieve_dataset_type_method` has to 

2996 be called beforehand. 

2997 """ 

2998 if self.trustGetRequest: 

2999 if self._retrieve_dataset_method is None: 

3000 # We could raise an exception here but unit tests do not define 

3001 # this method. 

3002 return ref 

3003 dataset_type = self._retrieve_dataset_method(ref.datasetType.name) 

3004 if dataset_type is not None: 

3005 ref = ref.overrideStorageClass(dataset_type.storageClass) 

3006 return ref