Coverage for tests/test_datasets.py: 9% (292 statements)
Report generated by coverage.py v6.5.0 at 2022-10-12 09:01 +0000.

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

21 

import copy
import pickle
import unittest

from lsst.daf.butler import (
    DataCoordinate,
    DatasetRef,
    DatasetType,
    DimensionUniverse,
    StorageClass,
    StorageClassFactory,
)

"""Tests for datasets module.
"""

37 

38 

class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType."""

    def setUp(self):
        # A fresh default dimension universe for every test.
        self.universe = DimensionUniverse()

    def testConstructor(self):
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("visit", "instrument"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

        with self.assertRaises(ValueError, msg="Construct component without parent storage class"):
            DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"), dimensions, storageClass)
        with self.assertRaises(ValueError, msg="Construct non-component with parent storage class"):
            DatasetType(datasetTypeName, dimensions, storageClass, parentStorageClass="NotAllowed")

    def testConstructor2(self):
        """Test construction from StorageClass name."""
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self):
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b", "_a")
        badNames = ("1", "a%b", "B+Z", "T[0]")

        # Construct storage class with all the good names included as
        # components so that we can test internal consistency
        storageClass = StorageClass(
            "test_StructuredData", components={n: StorageClass("component") for n in goodNames}
        )

        for name in goodNames:
            composite = DatasetType(name, dimensions, storageClass)
            self.assertEqual(composite.name, name)
            for suffix in goodNames:
                full = DatasetType.nameWithComponent(name, suffix)
                component = composite.makeComponentDatasetType(suffix)
                self.assertEqual(component.name, full)
                self.assertEqual(component.parentStorageClass.name, "test_StructuredData")
            for suffix in badNames:
                full = DatasetType.nameWithComponent(name, suffix)
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

    def testEquality(self):
        """Test the equality operator, covering storage classes given by
        instance or by name, parent storage classes, and calibration flags.
        """
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        parent = StorageClass("test")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, storageA),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, "test_a"),
            DatasetType("a", dimensionsA, storageA),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, "test_a"),
        )
        self.assertEqual(
            DatasetType("a", dimensionsA, "test_a"),
            DatasetType("a", dimensionsA, "test_a"),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=True),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=False),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("b", dimensionsA, storageA),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("b", dimensionsA, "test_a"),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, storageB),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsA, "test_b"),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsB, storageA),
        )
        self.assertNotEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType("a", dimensionsB, "test_a"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageA),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageB),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageA"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageB"),
        )

    def testCompatibility(self):
        """Test that storage class converters make otherwise-unequal
        dataset types compatible (directionally).
        """
        storageA = StorageClass("test_a", pytype=set, converters={"list": "builtins.set"})
        storageB = StorageClass("test_b", pytype=list)
        storageC = StorageClass("test_c", pytype=dict)
        self.assertTrue(storageA.can_convert(storageB))
        dimensionsA = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensionsA, storageA)
        dA2 = DatasetType("a", dimensionsA, storageB)
        self.assertNotEqual(dA, dA2)
        self.assertTrue(dA.is_compatible_with(dA))
        self.assertTrue(dA.is_compatible_with(dA2))
        # Compatibility is not symmetric: only storageA declares a converter.
        self.assertFalse(dA2.is_compatible_with(dA))

        dA3 = DatasetType("a", dimensionsA, storageC)
        self.assertFalse(dA.is_compatible_with(dA3))

    def testOverrideStorageClass(self):
        """Test overriding the storage class of a dataset type, including
        round-tripping and component parent propagation.
        """
        storageA = StorageClass("test_a", pytype=list, converters={"dict": "builtins.list"})
        storageB = StorageClass("test_b", pytype=dict)
        dimensions = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensions, storageA)
        dB = dA.overrideStorageClass(storageB)
        self.assertNotEqual(dA, dB)
        self.assertEqual(dB.storageClass, storageB)

        round_trip = dB.overrideStorageClass(storageA)
        self.assertEqual(round_trip, dA)

        # Check that parents move over.
        parent = StorageClass("composite", components={"a": storageA, "c": storageA})
        dP = DatasetType("comp", dimensions, parent)
        dP_A = dP.makeComponentDatasetType("a")
        dp_B = dP_A.overrideStorageClass(storageB)
        self.assertEqual(dp_B.storageClass, storageB)
        self.assertEqual(dp_B.parentStorageClass, parent)

    def testJson(self):
        """Test JSON serialization round-trips for plain and component
        dataset types.
        """
        storageA = StorageClass("test_a")
        dimensionsA = self.universe.extract(["instrument"])
        self.assertEqual(
            DatasetType("a", dimensionsA, storageA),
            DatasetType.from_json(
                DatasetType("a", dimensionsA, storageA).to_json(),
                self.universe,
            ),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType.from_json(
                DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent").to_json(),
                self.universe,
            ),
        )

    def testSorting(self):
        """Can we sort a DatasetType"""
        storage = StorageClass("test_a")
        dimensions = self.universe.extract(["instrument"])

        d_a = DatasetType("a", dimensions, storage)
        d_f = DatasetType("f", dimensions, storage)
        d_p = DatasetType("p", dimensions, storage)

        sort = sorted([d_p, d_f, d_a])
        self.assertEqual(sort, [d_a, d_f, d_p])

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", d_p, "c", d_f, d_a, "d"])

    def testHashability(self):
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dimensions in [("instrument",), ("skymap",)]:
                    datasetType = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    datasetTypeCopy = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique

        # also check that hashes of instances constructed with StorageClass
        # name matches hashes of instances constructed with instances
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_d"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageD)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_d"))
        )

    def testDeepCopy(self):
        """Test that we can copy a dataset type."""
        storageClass = StorageClass("test_copy")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)

        # Now with calibration flag set
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)
        self.assertTrue(dcopy.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("copy_component")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        dcopy = copy.deepcopy(componentDatasetType)
        self.assertEqual(dcopy, componentDatasetType)

    def testPickle(self):
        """Test pickle support."""
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)
        self.assertIsNone(datasetTypeOut.parentStorageClass)
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertFalse(datasetTypeOut.isCalibration())

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertTrue(datasetTypeOut.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("pickle_component")
        StorageClassFactory().registerStorageClass(componentStorageClass)
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(componentDatasetType.name, datasetTypeOut.name)
        self.assertEqual(componentDatasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(componentDatasetType.storageClass, datasetTypeOut.storageClass)
        self.assertEqual(componentDatasetType.parentStorageClass, datasetTypeOut.parentStorageClass)
        self.assertEqual(datasetTypeOut.parentStorageClass.name, storageClass.name)
        self.assertEqual(datasetTypeOut, componentDatasetType)

        # Now with a string and not a real storage class to test that
        # pickling doesn't force the StorageClass to be resolved
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            "StrangeComponent",
            parentStorageClass="UnknownParent",
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

        # Now with a storage class that is created by the factory
        factoryStorageClassClass = StorageClassFactory.makeNewStorageClass("ParentClass")
        factoryComponentStorageClassClass = StorageClassFactory.makeNewStorageClass("ComponentClass")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            factoryComponentStorageClassClass(),
            parentStorageClass=factoryStorageClassClass(),
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

    def test_composites(self):
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")
        storageClass = StorageClass(
            "test_composite", components={"compA": storageClassA, "compB": storageClassB}
        )
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions, storageClass)
        datasetTypeComponentA = datasetTypeComposite.makeComponentDatasetType("compA")
        datasetTypeComponentB = datasetTypeComposite.makeComponentDatasetType("compB")

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(), ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(), ("composite", "compA"))

        self.assertEqual(datasetTypeComponentA.parentStorageClass, storageClass)
        self.assertEqual(datasetTypeComponentB.parentStorageClass, storageClass)
        self.assertIsNone(datasetTypeComposite.parentStorageClass)

        with self.assertRaises(KeyError):
            datasetTypeComposite.makeComponentDatasetType("compF")

504 

class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef."""

    def setUp(self):
        # Shared fixtures: a composite dataset type over (instrument, visit)
        # and a matching data ID.
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        self.parentStorageClass = StorageClass(
            "Parent", components={"a": self.componentStorageClass1, "b": self.componentStorageClass2}
        )
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = dict(instrument="DummyCam", visit=42)
        self.datasetType = DatasetType(datasetTypeName, dimensions, self.parentStorageClass)

    def testConstructor(self):
        """Test that construction preserves and validates values."""
        # Construct an unresolved ref.
        ref = DatasetRef(self.datasetType, self.dataId)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        # Constructing an unresolved ref with run and/or components should
        # fail.
        run = "somerun"
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run)
        # Passing a data ID that is missing dimensions should fail.
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, {"instrument": "DummyCam"})
        # Constructing a resolved ref should preserve run as well as everything
        # else.
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, 1)
        self.assertEqual(ref.run, run)

    def testSorting(self):
        """Can we sort a DatasetRef"""
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=1))
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10))
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=22))

        # Enable detailed diff report
        self.maxDiff = None

        # This will sort them on visit number
        sort = sorted([ref3, ref1, ref2])
        self.assertEqual(sort, [ref1, ref2, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now include a run
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=43), run="b", id=2)
        self.assertEqual(ref1.run, "b")
        ref4 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10), run="b", id=2)
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=4), run="a", id=1)
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=104), run="c", id=3)

        # This will sort them on run before visit
        sort = sorted([ref3, ref1, ref2, ref4])
        self.assertEqual(sort, [ref2, ref4, ref1, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", ref1, "c"])

    def testResolving(self):
        """Test that unresolved() strips id/run and resolved() restores
        an equal ref.
        """
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")
        unresolvedRef = ref.unresolved()
        self.assertIsNotNone(ref.id)
        self.assertIsNone(unresolvedRef.id)
        self.assertIsNone(unresolvedRef.run)
        self.assertNotEqual(ref, unresolvedRef)
        self.assertEqual(ref.unresolved(), unresolvedRef)
        self.assertEqual(ref.datasetType, unresolvedRef.datasetType)
        self.assertEqual(ref.dataId, unresolvedRef.dataId)
        reresolvedRef = unresolvedRef.resolved(id=1, run="somerun")
        self.assertEqual(ref, reresolvedRef)
        self.assertEqual(reresolvedRef.unresolved(), unresolvedRef)
        self.assertIsNotNone(reresolvedRef.run)

    def testOverrideStorageClass(self):
        """Test overriding the storage class of a ref, including rejection
        of an incompatible storage class.
        """
        storageA = StorageClass("test_a", pytype=list)

        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")

        ref_new = ref.overrideStorageClass(storageA)
        self.assertNotEqual(ref, ref_new)
        self.assertEqual(ref_new.datasetType.storageClass, storageA)
        self.assertEqual(ref_new.overrideStorageClass(ref.datasetType.storageClass), ref)

        incompatible_sc = StorageClass("my_int", pytype=int)
        with self.assertRaises(ValueError):
            # Do not test against "ref" because it has a default storage class
            # of "object" which is compatible with everything.
            ref_new.overrideStorageClass(incompatible_sc)

    def testPickle(self):
        """Test pickle round-tripping of a resolved ref."""
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)

    def testJson(self):
        """Test JSON round-tripping of a resolved ref."""
        ref = DatasetRef(self.datasetType, self.dataId, id=1, run="somerun")
        s = ref.to_json()
        self.assertEqual(DatasetRef.from_json(s, universe=self.universe), ref)

617 

# Allow the test module to be run directly as a script.
if __name__ == "__main__":
    unittest.main()