# This file is part of daf_butler. 

# 

# Developed for the LSST Data Management System. 

# This product includes software developed by the LSST Project 

# (http://www.lsst.org). 

# See the COPYRIGHT file at the top-level directory of this distribution 

# for details of code ownership. 

# 

# This program is free software: you can redistribute it and/or modify 

# it under the terms of the GNU General Public License as published by 

# the Free Software Foundation, either version 3 of the License, or 

# (at your option) any later version. 

# 

# This program is distributed in the hope that it will be useful, 

# but WITHOUT ANY WARRANTY; without even the implied warranty of 

# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

# GNU General Public License for more details. 

# 

# You should have received a copy of the GNU General Public License 

# along with this program. If not, see <http://www.gnu.org/licenses/>. 

 

"""POSIX datastore.""" 

 

__all__ = ("PosixDatastore", ) 

 

import hashlib 

import logging 

import os 

import shutil 

 

from lsst.daf.butler import DatasetTypeNotSupportedError 

 

from .fileLikeDatastore import FileLikeDatastore 

from lsst.daf.butler.core.safeFileIo import safeMakeDir 

from lsst.daf.butler.core.utils import transactional 

 

log = logging.getLogger(__name__) 

 

 

class PosixDatastore(FileLikeDatastore): 

"""Basic POSIX filesystem backed Datastore. 

 

Parameters 

---------- 

config : `DatastoreConfig` or `str` 

Configuration. A string should refer to the name of the config file. 

registry : `Registry` 

Registry to use for storing internal information about the datasets. 

butlerRoot : `str`, optional 

New datastore root to use to override the configuration value. 

 

Raises 

------ 

ValueError 

If root location does not exist and ``create`` is `False` in the 

configuration. 
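
Examples
--------
A minimal construction sketch, not taken from the package docs; it
assumes a `Registry` instance called ``registry`` and a configuration
whose root either already exists or sets ``create`` to true::

    datastore = PosixDatastore("datastores/posixDatastore.yaml", registry)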

""" 

 

defaultConfigFile = "datastores/posixDatastore.yaml" 

"""Path to configuration defaults. Relative to $DAF_BUTLER_DIR/config or 

absolute path. Can be None if no defaults specified. 

""" 

 

def __init__(self, config, registry, butlerRoot=None): 

super().__init__(config, registry, butlerRoot) 

 

if not os.path.isdir(self.root): 

if "create" not in self.config or not self.config["create"]: 

raise ValueError(f"No valid root at: {self.root}") 

safeMakeDir(self.root) 

 

def exists(self, ref): 

"""Check if the dataset exists in the datastore. 

 

Parameters 

---------- 

ref : `DatasetRef` 

Reference to the required dataset. 

 

Returns 

------- 

exists : `bool` 

`True` if the entity exists in the `Datastore`. 
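
Examples
--------
Hypothetical usage; ``ref`` is a `DatasetRef` obtained elsewhere::

    if datastore.exists(ref):
        data = datastore.get(ref)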

""" 

location, _ = self._get_dataset_location_info(ref) 

if location is None: 

return False 

return os.path.exists(location.path) 

 

def get(self, ref, parameters=None): 

"""Load an InMemoryDataset from the store. 

 

Parameters 

---------- 

ref : `DatasetRef` 

Reference to the required Dataset. 

parameters : `dict` 

`StorageClass`-specific parameters that specify, for example, 

a slice of the Dataset to be loaded. 

 

Returns 

------- 

inMemoryDataset : `object` 

Requested Dataset or slice thereof as an InMemoryDataset. 

 

Raises 

------ 

FileNotFoundError 

Requested dataset can not be retrieved. 

TypeError 

Return value from formatter has unexpected type. 

ValueError 

Formatter failed to process the dataset. 
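
Examples
--------
Illustrative only; ``ref`` is a `DatasetRef` known to the registry, and
``bbox`` stands in for a `StorageClass`-specific slicing parameter that
the stored dataset type actually supports::

    dataset = datastore.get(ref)
    cutout = datastore.get(ref, parameters={"bbox": bbox})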

""" 

getInfo = self._prepare_for_get(ref, parameters) 

location = getInfo.location 

 

# Too expensive to recalculate the checksum on fetch 

# but we can check size and existence 

if not os.path.exists(location.path): 

raise FileNotFoundError("Dataset with Id {} does not seem to exist at" 

" expected location of {}".format(ref.id, location.path)) 

stat = os.stat(location.path) 

size = stat.st_size 

storedFileInfo = getInfo.info 

if size != storedFileInfo.file_size: 

raise RuntimeError("Integrity failure in Datastore. Size of file {} ({}) does not" 

" match recorded size of {}".format(location.path, size, 

storedFileInfo.file_size)) 

 

formatter = getInfo.formatter 

try: 

result = formatter.read(component=getInfo.component) 

except Exception as e: 

raise ValueError(f"Failure from formatter '{formatter.name()}' for Dataset {ref.id}") from e 

 

return self._post_process_get(result, getInfo.readStorageClass, getInfo.assemblerParams) 

 

@transactional 

def put(self, inMemoryDataset, ref): 

"""Write a InMemoryDataset with a given `DatasetRef` to the store. 

 

Parameters 

---------- 

inMemoryDataset : `object` 

The Dataset to store. 

ref : `DatasetRef` 

Reference to the associated Dataset. 

 

Raises 

------ 

TypeError 

Supplied object and storage class are inconsistent. 

DatasetTypeNotSupportedError 

The associated `DatasetType` is not handled by this datastore. 

 

Notes 

----- 

If the datastore is configured to reject certain dataset types it 

is possible that the put will fail and raise a 

`DatasetTypeNotSupportedError`. The main use case for this is to 

allow `ChainedDatastore` to put to multiple datastores without 

requiring that every datastore accepts the dataset. 
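
Examples
--------
A hedged sketch; ``inMemoryDataset`` and ``ref`` are placeholders for
objects created elsewhere, and the ref's `DatasetType` must pass this
datastore's constraints::

    datastore.put(inMemoryDataset, ref)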

""" 

location, formatter = self._prepare_for_put(inMemoryDataset, ref) 

 

storageDir = os.path.dirname(location.path) 

if not os.path.isdir(storageDir): 

with self._transaction.undoWith("mkdir", os.rmdir, storageDir): 

safeMakeDir(storageDir) 

 

# Write the file 

predictedFullPath = os.path.join(self.root, formatter.predictPath()) 

 

if os.path.exists(predictedFullPath): 

raise FileExistsError(f"Cannot write file for ref {ref} as " 

f"output file {predictedFullPath} already exists") 

 

with self._transaction.undoWith("write", os.remove, predictedFullPath): 

path = formatter.write(inMemoryDataset) 

assert predictedFullPath == os.path.join(self.root, path) 

log.debug("Wrote file to %s", path) 

 

self.ingest(path, ref, formatter=formatter) 

 

@transactional 

def ingest(self, path, ref, formatter=None, transfer=None): 

"""Add an on-disk file with the given `DatasetRef` to the store, 

possibly transferring it. 

 

The caller is responsible for ensuring that the given (or predicted) 

Formatter is consistent with how the file was written; `ingest` will 

in general silently ignore incorrect formatters (as it cannot 

efficiently verify their correctness), deferring errors until ``get`` 

is first called on the ingested dataset. 

 

Parameters 

---------- 

path : `str` 

File path. Treated as relative to the repository root if not 

absolute. 

ref : `DatasetRef` 

Reference to the associated Dataset. 

formatter : `Formatter`, optional 

Formatter that should be used to retrieve the Dataset. If not 

provided, the formatter will be constructed according to 

Datastore configuration. Can be the Formatter class or an 

instance. 

transfer : `str`, optional 

If not None, must be one of 'move', 'copy', 'hardlink', or 

'symlink' indicating how to transfer the file. The new 

filename and location will be determined via template substitution, 

as with ``put``. If the file is outside the datastore root, it 

must be transferred somehow. 

 

Raises 

------ 

RuntimeError 

Raised if ``transfer is None`` and path is outside the repository 

root. 

FileNotFoundError 

Raised if the file at ``path`` does not exist. 

FileExistsError 

Raised if ``transfer is not None`` but a file already exists at the 

location computed from the template. 

DatasetTypeNotSupportedError 

The associated `DatasetType` is not handled by this datastore. 
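
Examples
--------
Illustrative calls only; ``ref`` and the file paths are assumptions,
not values defined in this module::

    # File already below the datastore root: record it in place.
    datastore.ingest("relative/path/to/file.fits", ref)

    # File elsewhere on disk: copy it to the templated location.
    datastore.ingest("/tmp/file.fits", ref, transfer="copy")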

""" 

 

# Confirm that we can accept this dataset 

if not self.constraints.isAcceptable(ref): 

# Raise rather than use boolean return value. 

raise DatasetTypeNotSupportedError(f"Dataset {ref} has been rejected by this datastore via" 

" configuration.") 

 

if formatter is None: 

formatter = self.formatterFactory.getFormatterClass(ref) 

 

fullPath = os.path.normpath(os.path.join(self.root, path)) 

if not os.path.exists(fullPath): 

raise FileNotFoundError("File at '{}' does not exist; note that paths to ingest are " 

"assumed to be relative to self.root unless they are absolute." 

.format(fullPath)) 

 

if transfer is None: 

if os.path.isabs(path): 

absRoot = os.path.abspath(self.root) 

if os.path.commonpath([absRoot, path]) != absRoot: 

raise RuntimeError("'{}' is not inside repository root '{}'".format(path, self.root)) 

path = os.path.relpath(path, absRoot) 

elif path.startswith(os.path.pardir): 

raise RuntimeError(f"'{path}' is outside repository root '{self.root}'") 

else: 

template = self.templates.getTemplate(ref) 

location = self.locationFactory.fromPath(template.format(ref)) 

newPath = formatter.predictPathFromLocation(location) 

newFullPath = os.path.join(self.root, newPath) 

if os.path.exists(newFullPath): 

raise FileExistsError("File '{}' already exists".format(newFullPath)) 

storageDir = os.path.dirname(newFullPath) 

if not os.path.isdir(storageDir): 

with self._transaction.undoWith("mkdir", os.rmdir, storageDir): 

safeMakeDir(storageDir) 

if transfer == "move": 

with self._transaction.undoWith("move", shutil.move, newFullPath, fullPath): 

shutil.move(fullPath, newFullPath) 

elif transfer == "copy": 

with self._transaction.undoWith("copy", os.remove, newFullPath): 

shutil.copy(fullPath, newFullPath) 

elif transfer == "hardlink": 

with self._transaction.undoWith("hardlink", os.unlink, newFullPath): 

os.link(fullPath, newFullPath) 

elif transfer == "symlink": 

with self._transaction.undoWith("symlink", os.unlink, newFullPath): 

os.symlink(fullPath, newFullPath) 

else: 

raise NotImplementedError("Transfer type '{}' not supported.".format(transfer)) 

path = newPath 

fullPath = newFullPath 

 

# Create Storage information in the registry 

checksum = self.computeChecksum(fullPath) 

stat = os.stat(fullPath) 

size = stat.st_size 

 

# Update the registry 

self._register_dataset_file(ref, formatter, path, size, checksum) 

 

def remove(self, ref): 

"""Indicate to the Datastore that a Dataset can be removed. 

 

.. warning:: 

 

This method does not support transactions; removals are 

immediate, cannot be undone, and are not guaranteed to 

be atomic if deleting either the file or the internal 

database records fails. 

 

Parameters 

---------- 

ref : `DatasetRef` 

Reference to the required Dataset. 

 

Raises 

------ 

FileNotFoundError 

Attempt to remove a dataset that does not exist. 
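
Examples
--------
Hypothetical usage; remember that the removal is immediate and cannot
be rolled back::

    if datastore.exists(ref):
        datastore.remove(ref)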

""" 

# Get file metadata and internal metadata 

location, storedFileInfo = self._get_dataset_location_info(ref) 

if location is None: 

raise FileNotFoundError(f"Requested dataset ({ref}) does not exist") 

 

if not os.path.exists(location.path): 

raise FileNotFoundError(f"No such file: {location.uri}") 

os.remove(location.path) 

 

# Remove rows from registries 

self._remove_from_registry(ref) 

 

@staticmethod 

def computeChecksum(filename, algorithm="blake2b", block_size=8192): 

"""Compute the checksum of the supplied file. 

 

Parameters 

---------- 

filename : `str` 

Name of file to calculate checksum from. 

algorithm : `str`, optional 

Name of algorithm to use. Must be one of the algorithms supported 

by :py:mod:`hashlib`. 

block_size : `int`, optional 

Number of bytes to read from file at one time. 

 

Returns 

------- 

hexdigest : `str` 

Hex digest of the file. 
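
Examples
--------
For example, with any readable file path (shown with the default
``blake2b`` algorithm; the path here is a placeholder)::

    digest = PosixDatastore.computeChecksum("relative/or/absolute/file.fits")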

""" 

if algorithm not in hashlib.algorithms_guaranteed: 

raise NameError("The specified algorithm '{}' is not supported by hashlib".format(algorithm)) 

 

hasher = hashlib.new(algorithm) 

 

with open(filename, "rb") as f: 

for chunk in iter(lambda: f.read(block_size), b""): 

hasher.update(chunk) 

 

return hasher.hexdigest()