# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
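
"""Unit tests for the daf_butler Datastore implementations."""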

import os
import shutil
import tempfile
import unittest

import yaml

import lsst.utils.tests
from lsst.utils import doImport

from lsst.daf.butler import StorageClassFactory, StorageClass, DimensionUniverse, FileDataset
from lsst.daf.butler import DatastoreConfig, DatasetTypeNotSupportedError, DatastoreValidationError
from lsst.daf.butler import ButlerURI
from lsst.daf.butler.formatters.yamlFormatter import YamlFormatter

from lsst.daf.butler.tests import (DatasetTestHelper, DatastoreTestHelper, BadWriteFormatter,
                                   BadNoWriteFormatter, MetricsExample, DummyRegistry)


TESTDIR = os.path.dirname(__file__)


def makeExampleMetrics():
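    """Return a MetricsExample with fixed values for use as test data."""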

    return MetricsExample({"AM1": 5.2, "AM2": 30.6},
                          {"a": [1, 2, 3],
                           "b": {"blue": 5, "red": "green"}},
                          [563, 234, 456.7])


class TransactionTestError(Exception):
    """Specific error for transactions, to prevent the misdiagnosis
    that might otherwise occur when a standard exception is used.
    """
    pass


class DatastoreTestsBase(DatasetTestHelper, DatastoreTestHelper):
    """Support routines for datastore testing."""

    root = None

    @classmethod
    def setUpClass(cls):
        # Storage Classes are fixed for all datastores in these tests
        scConfigFile = os.path.join(TESTDIR, "config/basic/storageClasses.yaml")
        cls.storageClassFactory = StorageClassFactory()
        cls.storageClassFactory.addFromConfig(scConfigFile)

        # Read the Datastore config so we can get the class
        # information (since we should not assume the constructor
        # name here, but rely on the configuration file itself)
        datastoreConfig = DatastoreConfig(cls.configFile)
        cls.datastoreType = doImport(datastoreConfig["cls"])
        cls.universe = DimensionUniverse()

    def setUp(self):
        self.setUpDatastoreTests(DummyRegistry, DatastoreConfig)

    def tearDown(self):
        if self.root is not None and os.path.exists(self.root):
            shutil.rmtree(self.root, ignore_errors=True)


class DatastoreTests(DatastoreTestsBase):
    """Some basic tests of a simple datastore."""

    hasUnsupportedPut = True

    def testConfigRoot(self):
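        """Test that a new root can be applied to a datastore configuration."""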

        full = DatastoreConfig(self.configFile)
        config = DatastoreConfig(self.configFile, mergeDefaults=False)
        newroot = "/random/location"
        self.datastoreType.setConfigRoot(newroot, config, full)
        if self.rootKeys:
            for k in self.rootKeys:
                self.assertIn(newroot, config[k])

    def testConstructor(self):
        datastore = self.makeDatastore()
        self.assertIsNotNone(datastore)
        self.assertIs(datastore.isEphemeral, self.isEphemeral)

    def testConfigurationValidation(self):
        datastore = self.makeDatastore()
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        datastore.validateConfiguration([sc])

        sc2 = self.storageClassFactory.getStorageClass("ThingTwo")
        if self.validationCanFail:
            with self.assertRaises(DatastoreValidationError):
                datastore.validateConfiguration([sc2], logFailures=True)

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.validateConfiguration([ref])

    def testParameterValidation(self):
        """Check that parameters are validated."""
        sc = self.storageClassFactory.getStorageClass("ThingOne")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore = self.makeDatastore()
        data = {1: 2, 3: 4}
        datastore.put(data, ref)
        newdata = datastore.get(ref)
        self.assertEqual(data, newdata)
        with self.assertRaises(KeyError):
            newdata = datastore.get(ref, parameters={"missing": 5})

    def testBasicPutGet(self):
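        """Test basic put and get, including component access and URIs."""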

        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredData",
                                     "StructuredDataJson",
                                     "StructuredDataPickle")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        for sc in storageClasses:
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
            print("Using storageClass: {}".format(sc.name))
            datastore.put(metrics, ref)

            # Does it exist?
            self.assertTrue(datastore.exists(ref))

            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)

            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

            # Get a component -- we need to construct new refs for them
            # with derived storage classes but with the parent ID
            comp = "output"
            compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(comp), dimensions,
                                          sc.components[comp], dataId, id=ref.id)
            output = datastore.get(compRef)
            self.assertEqual(output, metricsOut.output)

            uri = datastore.getUri(compRef)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        storageClass = sc

        # Check that a put fails if the dataset type is not supported
        if self.hasUnsupportedPut:
            sc = StorageClass("UnsupportedSC", pytype=type(metrics))
            ref = self.makeDatasetRef("unsupportedType", dimensions, sc, dataId)
            with self.assertRaises(DatasetTypeNotSupportedError):
                datastore.put(metrics, ref)

        # These should raise
        ref = self.makeDatasetRef("metrics", dimensions, storageClass, dataId, id=10000)
        with self.assertRaises(FileNotFoundError):
            # non-existing file
            datastore.get(ref)

        # Get a URI from it
        uri = datastore.getUri(ref, predict=True)
        self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

        with self.assertRaises(FileNotFoundError):
            datastore.getUri(ref)

    def testCompositePutGet(self):
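        """Test that a composite can be disassembled, stored as components,
        and reassembled from what is read back."""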

        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        # Create multiple storage classes for testing different formulations
        # of composites
        storageClasses = [self.storageClassFactory.getStorageClass(sc)
                          for sc in ("StructuredComposite",
                                     "StructuredCompositeTestA",
                                     "StructuredCompositeTestB")]

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 428, "physical_filter": "R"}

        for sc in storageClasses:
            print("Using storageClass: {}".format(sc.name))
            ref = self.makeDatasetRef("metric", dimensions, sc, dataId,
                                      conform=False)

            components = sc.assembler().disassemble(metrics)
            self.assertTrue(components)

            compsRead = {}
            for compName, compInfo in components.items():
                compRef = self.makeDatasetRef(ref.datasetType.componentTypeName(compName), dimensions,
                                              components[compName].storageClass, dataId,
                                              conform=False)

                print("Writing component {} with {}".format(compName, compRef.datasetType.storageClass.name))
                datastore.put(compInfo.component, compRef)

                uri = datastore.getUri(compRef)
                self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)

                compsRead[compName] = datastore.get(compRef)

                # We can generate identical files for each storage class
                # so remove the component here
                datastore.remove(compRef)

            # Combine all the components we read back into a new composite
            metricsOut = sc.assembler().assemble(compsRead)
            self.assertEqual(metrics, metricsOut)

    def testRemove(self):
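        """Test removal of a dataset from a datastore."""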

        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()
        # Put
        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 638, "physical_filter": "U"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)
        datastore.put(metrics, ref)

        # Does it exist?
        self.assertTrue(datastore.exists(ref))

        # Get
        metricsOut = datastore.get(ref)
        self.assertEqual(metrics, metricsOut)
        # Remove
        datastore.remove(ref)

        # Does it exist?
        self.assertFalse(datastore.exists(ref))

        # Do we now get a predicted URI?
        uri = datastore.getUri(ref, predict=True)
        self.assertTrue(uri.endswith("#predicted"))

        # Get should now fail
        with self.assertRaises(FileNotFoundError):
            datastore.get(ref)
        # Can only delete once
        with self.assertRaises(FileNotFoundError):
            datastore.remove(ref)

    def testTransfer(self):
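        """Test transferring a dataset between two datastores."""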

        metrics = makeExampleMetrics()

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 2048, "physical_filter": "Uprime"}

        sc = self.storageClassFactory.getStorageClass("StructuredData")
        ref = self.makeDatasetRef("metric", dimensions, sc, dataId, conform=False)

        inputDatastore = self.makeDatastore("test_input_datastore")
        outputDatastore = self.makeDatastore("test_output_datastore")

        inputDatastore.put(metrics, ref)
        outputDatastore.transfer(inputDatastore, ref)

        metricsOut = outputDatastore.get(ref)
        self.assertEqual(metrics, metricsOut)

    def testBasicTransaction(self):
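        """Test that a failed transaction rolls back its puts while a
        successful transaction retains them."""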

        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        nDatasets = 6
        dataIds = [{"instrument": "dummy", "visit": i, "physical_filter": "V"} for i in range(nDatasets)]
        data = [(self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False),
                 makeExampleMetrics(),)
                for dataId in dataIds]
        succeed = data[:nDatasets//2]
        fail = data[nDatasets//2:]
        # All datasets added in this transaction should continue to exist
        with datastore.transaction():
            for ref, metrics in succeed:
                datastore.put(metrics, ref)
        # Whereas datasets added in this transaction should not
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                for ref, metrics in fail:
                    datastore.put(metrics, ref)
                raise TransactionTestError("This should propagate out of the context manager")
        # Check for datasets that should exist
        for ref, metrics in succeed:
            # Does it exist?
            self.assertTrue(datastore.exists(ref))
            # Get
            metricsOut = datastore.get(ref, parameters=None)
            self.assertEqual(metrics, metricsOut)
            # URI
            uri = datastore.getUri(ref)
            self.assertEqual(uri[:len(self.uriScheme)], self.uriScheme)
        # Check for datasets that should not exist
        for ref, _ in fail:
            # These should raise
            with self.assertRaises(FileNotFoundError):
                # non-existing file
                datastore.get(ref)
            with self.assertRaises(FileNotFoundError):
                datastore.getUri(ref)

    def testNestedTransaction(self):
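        """Test that a failure in an outer transaction also rolls back any
        nested transactions."""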

        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        refBefore = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                        conform=False)
        datastore.put(metrics, refBefore)
        with self.assertRaises(TransactionTestError):
            with datastore.transaction():
                dataId = {"instrument": "dummy", "visit": 1, "physical_filter": "V"}
                refOuter = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                               conform=False)
                datastore.put(metrics, refOuter)
                with datastore.transaction():
                    dataId = {"instrument": "dummy", "visit": 2, "physical_filter": "V"}
                    refInner = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                                   conform=False)
                    datastore.put(metrics, refInner)
                # All datasets should exist
                for ref in (refBefore, refOuter, refInner):
                    metricsOut = datastore.get(ref, parameters=None)
                    self.assertEqual(metrics, metricsOut)
                raise TransactionTestError("This should roll back the transaction")
        # Dataset(s) inserted before the transaction should still exist
        metricsOut = datastore.get(refBefore, parameters=None)
        self.assertEqual(metrics, metricsOut)
        # But all datasets inserted during the (rolled back) transaction
        # should be gone
        with self.assertRaises(FileNotFoundError):
            datastore.get(refOuter)
        with self.assertRaises(FileNotFoundError):
            datastore.get(refInner)

    def runIngestTest(self, func, expectOutput=True):
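        """Write example metrics to a temporary YAML file and call ``func``
        with the metrics object, the file path, and a matching dataset ref."""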

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()
        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)
        with lsst.utils.tests.getTempFilePath(".yaml", expectOutput=expectOutput) as path:
            with open(path, "w") as fd:
                yaml.dump(metrics._asdict(), stream=fd)
            func(metrics, path, ref)

    def testIngestNoTransfer(self):
        """Test ingesting existing files with no transfer."""
        datastore = self.makeDatastore()

        def succeed(obj, path, ref):
            """Ingest a file already in the datastore root."""
            # First move it into the root, and adjust the path accordingly
            path = shutil.copy(path, datastore.root)
            path = os.path.relpath(path, start=datastore.root)
            datastore.ingest(FileDataset(path=path, refs=ref), transfer=None)
            self.assertEqual(obj, datastore.get(ref))

        def failInputDoesNotExist(obj, path, ref):
            """Can't ingest files if we're given a bad path."""
            with self.assertRaises(FileNotFoundError):
                datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref), transfer=None)
            self.assertFalse(datastore.exists(ref))

        def failOutsideRoot(obj, path, ref):
            """Can't ingest files outside of datastore root."""
            with self.assertRaises(RuntimeError):
                datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=None)
            self.assertFalse(datastore.exists(ref))

        def failNotImplemented(obj, path, ref):
            with self.assertRaises(NotImplementedError):
                datastore.ingest(FileDataset(path=path, refs=ref), transfer=None)

        if None in self.ingestTransferModes:
            self.runIngestTest(failOutsideRoot)
            self.runIngestTest(failInputDoesNotExist)
            self.runIngestTest(succeed)
        else:
            self.runIngestTest(failNotImplemented)

    def testIngestTransfer(self):
        """Test ingesting existing files after transferring them."""
        for mode in ("copy", "move", "hardlink", "symlink"):
            with self.subTest(mode=mode):
                datastore = self.makeDatastore(mode)

                def succeed(obj, path, ref):
                    """Ingest a file by transferring it to the template
                    location."""
                    datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertEqual(obj, datastore.get(ref))

                def failInputDoesNotExist(obj, path, ref):
                    """Can't ingest files if we're given a bad path."""
                    with self.assertRaises(FileNotFoundError):
                        datastore.ingest(FileDataset(path="this-file-does-not-exist.yaml", refs=ref),
                                         transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failOutputExists(obj, path, ref):
                    """Can't ingest files if transfer destination already
                    exists."""
                    with self.assertRaises(FileExistsError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)
                    self.assertFalse(datastore.exists(ref))

                def failNotImplemented(obj, path, ref):
                    with self.assertRaises(NotImplementedError):
                        datastore.ingest(FileDataset(path=os.path.abspath(path), refs=ref), transfer=mode)

                if mode in self.ingestTransferModes:
                    self.runIngestTest(failInputDoesNotExist)
                    self.runIngestTest(succeed, expectOutput=(mode != "move"))
                    self.runIngestTest(failOutputExists)
                else:
                    self.runIngestTest(failNotImplemented)


class PosixDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """PosixDatastore specialization."""

    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")
    uriScheme = "file:"
    ingestTransferModes = (None, "copy", "move", "hardlink", "symlink")
    isEphemeral = False
    rootKeys = ("root",)
    validationCanFail = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class PosixDatastoreNoChecksumsTestCase(PosixDatastoreTestCase):
    """Posix datastore tests but with checksums disabled."""

    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreNoChecksums.yaml")

    def testChecksum(self):
        """Ensure that checksums have not been calculated."""

        datastore = self.makeDatastore()
        storageClass = self.storageClassFactory.getStorageClass("StructuredData")
        dimensions = self.universe.extract(("visit", "physical_filter"))
        metrics = makeExampleMetrics()

        dataId = {"instrument": "dummy", "visit": 0, "physical_filter": "V"}
        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId,
                                  conform=False)

        # Configuration should have disabled checksum calculation
        datastore.put(metrics, ref)
        info = datastore.getStoredItemInfo(ref)
        self.assertIsNone(info.checksum)

        # Remove the dataset, then put it back with checksums explicitly enabled
        datastore.remove(ref)
        datastore.useChecksum = True
        datastore.put(metrics, ref)

        info = datastore.getStoredItemInfo(ref)
        self.assertIsNotNone(info.checksum)


class CleanupPosixDatastoreTestCase(DatastoreTestsBase, unittest.TestCase):
    configFile = os.path.join(TESTDIR, "config/basic/butler.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testCleanup(self):
        """Test that a failed formatter write cleans up any partial file."""

        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        storageClass = self.storageClassFactory.getStorageClass("StructuredData")

        dimensions = self.universe.extract(("visit", "physical_filter"))
        dataId = {"instrument": "dummy", "visit": 52, "physical_filter": "V"}

        ref = self.makeDatasetRef("metric", dimensions, storageClass, dataId, conform=False)

        # Determine where the file will end up (we assume Formatters use
        # the same file extension)
        expectedUri = datastore.getUri(ref, predict=True)
        self.assertTrue(expectedUri.endswith(".yaml#predicted"),
                        f"Is there a file extension in {expectedUri}")

        # Convert to ButlerURI so we can extract the path component
        expectedUri = ButlerURI(expectedUri)
        expectedFile = expectedUri.path

        # Try a formatter that fails, and a formatter that fails but leaves
        # a file behind
        for formatter in (BadWriteFormatter, BadNoWriteFormatter):
            with self.subTest(formatter=formatter):

                # Monkey patch the formatter
                datastore.formatterFactory.registerFormatter(ref.datasetType, formatter,
                                                             overwrite=True)

                # Try to put the dataset, it should fail
                with self.assertRaises(Exception):
                    datastore.put(metrics, ref)

                # Check that there is no file on disk
                self.assertFalse(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")

                # Check that there is a directory
                self.assertTrue(os.path.exists(os.path.dirname(expectedFile)),
                                f"Check for existence of directory {os.path.dirname(expectedFile)}")

        # Force YamlFormatter and check that this time a file is written
        datastore.formatterFactory.registerFormatter(ref.datasetType, YamlFormatter,
                                                     overwrite=True)
        datastore.put(metrics, ref)
        self.assertTrue(os.path.exists(expectedFile), f"Check for existence of {expectedFile}")
        datastore.remove(ref)
        self.assertFalse(os.path.exists(expectedFile), f"Check for existence of now removed {expectedFile}")


class InMemoryDatastoreTestCase(DatastoreTests, unittest.TestCase):
    """InMemoryDatastore specialization."""

    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastore.yaml")
    uriScheme = "mem:"
    hasUnsupportedPut = False
    ingestTransferModes = ()
    isEphemeral = True
    rootKeys = None
    validationCanFail = False


class ChainedDatastoreTestCase(PosixDatastoreTestCase):
    """ChainedDatastore specialization using a PosixDatastore."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore.yaml")
    hasUnsupportedPut = False
    ingestTransferModes = ("copy", "hardlink", "symlink")
    isEphemeral = False
    rootKeys = (".datastores.1.root", ".datastores.2.root")
    validationCanFail = True


class ChainedDatastoreMemoryTestCase(InMemoryDatastoreTestCase):
    """ChainedDatastore specialization using all InMemoryDatastores."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2.yaml")
    validationCanFail = False


class DatastoreConstraintsTests(DatastoreTestsBase):
    """Basic tests of the constraints model of datastores."""

    def testConstraints(self):
        """Test the constraints model. Assumes that each test class has the
        same constraints."""
        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}

        # Write an empty file suitable for the ingest check
        testfile = tempfile.NamedTemporaryFile()
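
        # Each tuple below gives a dataset type name, a storage class, and
        # whether the datastore constraints should accept a put of that
        # combination.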

        for datasetTypeName, sc, accepted in (("metric", sc1, True), ("metric2", sc1, False),
                                              ("metric33", sc1, True), ("metric2", sc2, True)):
            with self.subTest(datasetTypeName=datasetTypeName):
                ref = self.makeDatasetRef(datasetTypeName, dimensions, sc, dataId, conform=False)
                if accepted:
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))
                    datastore.remove(ref)

                    # Try ingest
                    if self.canIngest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertTrue(datastore.exists(ref))
                        datastore.remove(ref)
                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    if self.canIngest:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertFalse(datastore.exists(ref))


class PosixDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """PosixDatastore specialization."""

    configFile = os.path.join(TESTDIR, "config/basic/posixDatastoreP.yaml")
    canIngest = True

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()


class InMemoryDatastoreConstraintsTestCase(DatastoreConstraintsTests, unittest.TestCase):
    """InMemoryDatastore specialization."""

    configFile = os.path.join(TESTDIR, "config/basic/inMemoryDatastoreP.yaml")
    canIngest = False


class ChainedDatastoreConstraintsNativeTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore, with
    constraints defined at the ChainedDatastore level."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePa.yaml")


class ChainedDatastoreConstraintsTestCase(PosixDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using a PosixDatastore."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastoreP.yaml")


class ChainedDatastoreMemoryConstraintsTestCase(InMemoryDatastoreConstraintsTestCase):
    """ChainedDatastore specialization using all InMemoryDatastores."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastore2P.yaml")
    canIngest = False


class ChainedDatastorePerStoreConstraintsTests(DatastoreTestsBase, unittest.TestCase):
    """Test that a chained datastore can apply constraints per-datastore,
    even if a child datastore would otherwise accept the dataset."""

    configFile = os.path.join(TESTDIR, "config/basic/chainedDatastorePb.yaml")

    def setUp(self):
        # Override the working directory before calling the base class
        self.root = tempfile.mkdtemp(dir=TESTDIR)
        super().setUp()

    def testConstraints(self):
        """Test the chained datastore constraints model."""

        metrics = makeExampleMetrics()
        datastore = self.makeDatastore()

        sc1 = self.storageClassFactory.getStorageClass("StructuredData")
        sc2 = self.storageClassFactory.getStorageClass("StructuredDataJson")
        dimensions = self.universe.extract(("visit", "physical_filter", "instrument"))
        dataId1 = {"visit": 52, "physical_filter": "V", "instrument": "DummyCamComp"}
        dataId2 = {"visit": 52, "physical_filter": "V", "instrument": "HSC"}

        # Write an empty file suitable for the ingest check
        testfile = tempfile.NamedTemporaryFile()
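
        # Each "accept" tuple below gives the expected presence of the dataset
        # in each child datastore of the chain after a put; "ingest" says
        # whether an ingest into the chain should succeed at all.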

        for typeName, dataId, sc, accept, ingest in (("metric", dataId1, sc1, (False, True, False), True),
                                                     ("metric2", dataId1, sc1, (False, False, False), False),
                                                     ("metric2", dataId2, sc1, (True, False, False), False),
                                                     ("metric33", dataId2, sc2, (True, True, False), True),
                                                     ("metric2", dataId1, sc2, (False, True, False), True)):
            with self.subTest(datasetTypeName=typeName, dataId=dataId, sc=sc.name):
                ref = self.makeDatasetRef(typeName, dimensions, sc, dataId,
                                          conform=False)
                if any(accept):
                    datastore.put(metrics, ref)
                    self.assertTrue(datastore.exists(ref))

                    # Check each datastore inside the chained datastore
                    for childDatastore, expected in zip(datastore.datastores, accept):
                        self.assertEqual(childDatastore.exists(ref), expected,
                                         f"Testing presence of {ref} in datastore {childDatastore.name}")

                    datastore.remove(ref)

                    # Check that ingest works
                    if ingest:
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                        self.assertTrue(datastore.exists(ref))

                        # Check each datastore inside the chained datastore
                        for childDatastore, expected in zip(datastore.datastores, accept):
                            # At the moment an ephemeral datastore means
                            # InMemory, which does not accept ingest of files
                            if childDatastore.isEphemeral:
                                expected = False
                            self.assertEqual(childDatastore.exists(ref), expected,
                                             f"Testing presence of ingested {ref} in datastore"
                                             f" {childDatastore.name}")

                        datastore.remove(ref)
                    else:
                        with self.assertRaises(DatasetTypeNotSupportedError):
                            datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")

                else:
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.put(metrics, ref)
                    self.assertFalse(datastore.exists(ref))

                    # Again with ingest
                    with self.assertRaises(DatasetTypeNotSupportedError):
                        datastore.ingest(FileDataset(testfile.name, [ref]), transfer="symlink")
                    self.assertFalse(datastore.exists(ref))


if __name__ == "__main__":
    unittest.main()