Coverage for python/lsst/daf/butler/datastores/posixDatastore.py: 88%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from typing import Iterable, Optional

from lsst.daf.butler import DatasetRef
"""Basic POSIX filesystem backed Datastore.
Parameters ---------- config : `DatastoreConfig` or `str` Configuration. A string should refer to the name of the config file. registry : `Registry` Registry to use for storing internal information about the datasets. butlerRoot : `str`, optional New datastore root to use to override the configuration value.
Raises ------ ValueError If root location does not exist and ``create`` is `False` in the configuration.
Notes ----- PosixDatastore supports all transfer modes for file-based ingest: `"move"`, `"copy"`, `"symlink"`, `"hardlink"`, and `None` (no transfer). """
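# Illustrative sketch (not part of this file): the transfer modes named
# above map naturally onto standard-library primitives. The helper name
# ``_transfer_file`` and its shape are assumptions for illustration, not
# the datastore's actual ingest implementation.
import shutil

def _transfer_file(src: str, dst: str, transfer: Optional[str] = None) -> None:
    if transfer is None:
        return  # file is assumed to already be inside the repository root
    if transfer == "move":
        shutil.move(src, dst)
    elif transfer == "copy":
        shutil.copy(src, dst)
    elif transfer == "symlink":
        os.symlink(src, dst)
    elif transfer == "hardlink":
        os.link(src, dst)
    else:
        raise NotImplementedError(f"Transfer type '{transfer}' not supported.")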
"""Path to configuration defaults. Relative to $DAF_BUTLER_DIR/config or absolute path. Can be None if no defaults specified. """
raise ValueError(f"No valid root at: {self.root}")
"""Check if the dataset exists in the datastore.
Parameters ---------- ref : `DatasetRef` Reference to the required dataset.
Returns ------- exists : `bool` `True` if the entity exists in the `Datastore`. """
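# Hypothetical usage sketch: ``exists`` is a cheap pre-check before a get
# or remove. The helper name is illustrative; ``datastore`` and ``ref``
# are assumed to come from the caller.
def fetch_if_present(datastore, ref):
    # Return the dataset if stored here, else None.
    if datastore.exists(ref):
        return datastore.get(ref)
    return None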
"""Load an InMemoryDataset from the store.
Parameters ---------- ref : `DatasetRef` Reference to the required Dataset. parameters : `dict` `StorageClass`-specific parameters that specify, for example, a slice of the Dataset to be loaded.
Returns ------- inMemoryDataset : `object` Requested Dataset or slice thereof as an InMemoryDataset.
Raises ------ FileNotFoundError Requested dataset can not be retrieved. TypeError Return value from formatter has unexpected type. ValueError Formatter failed to process the dataset. """
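# Hypothetical usage sketch for the ``parameters`` argument: valid keys
# depend on the dataset's StorageClass; ``"bbox"`` below is only an
# illustration of requesting a slice rather than the whole dataset.
def fetch_subset(datastore, ref, bbox):
    try:
        return datastore.get(ref, parameters={"bbox": bbox})
    except FileNotFoundError:
        # Known to the registry but not retrievable from this datastore.
        return None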
# Too expensive to recalculate the checksum on fetch
# but we can check size and existence
if not os.path.exists(location.path):
    raise FileNotFoundError("Dataset with Id {} does not seem to exist at"
                            " expected location of {}".format(ref.id, location.path))
size = os.stat(location.path).st_size
if size != storedFileInfo.file_size:
    raise RuntimeError("Integrity failure in Datastore. Size of file {} ({}) does not"
                       " match recorded size of {}".format(location.path, size,
                                                           storedFileInfo.file_size))
except Exception as e:
    raise ValueError(f"Failure from formatter '{formatter.name()}' for Dataset {ref.id}") from e
def put(self, inMemoryDataset, ref):
    """Write an InMemoryDataset with a given `DatasetRef` to the store.

    Parameters
    ----------
    inMemoryDataset : `object`
        The Dataset to store.
    ref : `DatasetRef`
        Reference to the associated Dataset.

    Raises
    ------
    TypeError
        Supplied object and storage class are inconsistent.
    DatasetTypeNotSupportedError
        The associated `DatasetType` is not handled by this datastore.

    Notes
    -----
    If the datastore is configured to reject certain dataset types it is
    possible that the put will fail and raise a
    `DatasetTypeNotSupportedError`. The main use case for this is to allow
    `ChainedDatastore` to put to multiple datastores without requiring
    that every datastore accepts the dataset.
    """
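# Hypothetical usage sketch of the rejection behaviour described above,
# mirroring what ChainedDatastore does when fanning a put out to several
# child datastores; the import path for DatasetTypeNotSupportedError is
# an assumption.
from lsst.daf.butler import DatasetTypeNotSupportedError

def put_if_supported(datastore, inMemoryDataset, ref):
    # Returns True if this datastore accepted the dataset.
    try:
        datastore.put(inMemoryDataset, ref)
    except DatasetTypeNotSupportedError:
        return False
    return True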
# Write the file
f"output file {predictedFullPath} already exists")
# Docstring inherited from FileLikeDatastore._standardizeIngestPath.
fullPath = os.path.normpath(os.path.join(self.root, path))
if not os.path.exists(fullPath):
    raise FileNotFoundError(f"File at '{fullPath}' does not exist; note that paths to ingest "
                            f"are assumed to be relative to self.root unless they are absolute.")
if transfer is None:
    absRoot = os.path.abspath(self.root)
    if os.path.commonpath([absRoot, path]) == absRoot:
        return os.path.relpath(path, absRoot)
    raise RuntimeError(f"'{path}' is outside repository root '{self.root}'.")
def _extractIngestInfo(self, path: str, ref: DatasetRef, *, formatter,
                       transfer: Optional[str] = None) -> StoredFileInfo:
    # Docstring inherited from FileLikeDatastore._extractIngestInfo.
    if transfer is None:
        ...
    elif transfer in ("move", "copy", "symlink", "hardlink"):
        ...
    else:
        raise NotImplementedError("Transfer type '{}' not supported.".format(transfer))
    return StoredFileInfo(formatter=formatter, path=path,
                          storageClass=ref.datasetType.storageClass,
                          file_size=size, checksum=checksum)
"""Indicate to the Datastore that a Dataset can be removed.
.. warning::
This method does not support transactions; removals are immediate, cannot be undone, and are not guaranteed to be atomic if deleting either the file or the internal database records fails.
Parameters ---------- ref : `DatasetRef` Reference to the required Dataset.
Raises ------ FileNotFoundError Attempt to remove a dataset that does not exist. """ # Get file metadata and internal metadata
raise FileNotFoundError(f"No such file: {location.uri}")
# Remove rows from registries
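# Hypothetical usage sketch: because removal is immediate and
# non-transactional (see the warning in the docstring above), a caller
# that only needs idempotent cleanup can swallow the FileNotFoundError.
def remove_quietly(datastore, ref):
    try:
        datastore.remove(ref)
    except FileNotFoundError:
        pass  # already absent; nothing to undo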
"""Compute the checksum of the supplied file.
Parameters ---------- filename : `str` Name of file to calculate checksum from. algorithm : `str`, optional Name of algorithm to use. Must be one of the algorithms supported by :py:class`hashlib`. block_size : `int` Number of bytes to read from file at one time.
Returns ------- hexdigest : `str` Hex digest of the file. """ raise NameError("The specified algorithm '{}' is not supported by hashlib".format(algorithm))
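# Illustrative standalone equivalent of the checksum computation
# documented above; the function name and default argument values are
# assumptions, not necessarily this class's actual defaults.
import hashlib

def compute_checksum(filename: str, algorithm: str = "blake2b", block_size: int = 8192) -> str:
    if algorithm not in hashlib.algorithms_available:
        raise NameError("The specified algorithm '{}' is not supported by hashlib".format(algorithm))
    hasher = hashlib.new(algorithm)
    with open(filename, "rb") as f:
        # Read in fixed-size blocks so large files never load fully into memory.
        for chunk in iter(lambda: f.read(block_size), b""):
            hasher.update(chunk)
    return hasher.hexdigest()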
def export(self, refs: Iterable[DatasetRef], *,
           directory: Optional[str] = None, transfer: Optional[str] = None) -> Iterable[FileDataset]:
    # Docstring inherited from Datastore.export.
    for ref in refs:
        ...
        raise FileNotFoundError(f"Could not retrieve Dataset {ref}.")
        # TODO: do we also need to return the readStorageClass somehow?
        if transfer is None:
            ...
        else:
            # TODO: add support for other transfer modes. If we support
            # moving, this method should become transactional.
            raise NotImplementedError(f"Transfer mode '{transfer}' not yet supported.")