# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import copy
import pickle
import unittest
import uuid

from lsst.daf.butler import (
    DataCoordinate,
    DatasetIdFactory,
    DatasetRef,
    DatasetType,
    DimensionUniverse,
    StorageClass,
    StorageClassFactory,
)

"""Tests for datasets module.
"""


class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType."""

    def setUp(self):
        self.universe = DimensionUniverse()
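        # The default DimensionUniverse supplies the standard butler dimensions
        # (e.g. instrument, visit, skymap) that the tests below rely on.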

    def testConstructor(self):
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("visit", "instrument"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

        with self.assertRaises(ValueError, msg="Construct component without parent storage class"):
            DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"), dimensions, storageClass)
        with self.assertRaises(ValueError, msg="Construct non-component with parent storage class"):
            DatasetType(datasetTypeName, dimensions, storageClass, parentStorageClass="NotAllowed")

    def testConstructor2(self):
        """Test construction from StorageClass name."""
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self):
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b", "_a")
        badNames = ("1", "a%b", "B+Z", "T[0]")
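        # Pattern implied by the examples above: names are built from letters,
        # digits, and underscores, must not start with a digit, and component
        # names follow the same rule.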

        # Construct storage class with all the good names included as
        # components so that we can test internal consistency
        storageClass = StorageClass(
            "test_StructuredData", components={n: StorageClass("component") for n in goodNames}
        )

        for name in goodNames:
            composite = DatasetType(name, dimensions, storageClass)
            self.assertEqual(composite.name, name)
            for suffix in goodNames:
                full = DatasetType.nameWithComponent(name, suffix)
                component = composite.makeComponentDatasetType(suffix)
                self.assertEqual(component.name, full)
                self.assertEqual(component.parentStorageClass.name, "test_StructuredData")
            for suffix in badNames:
                full = DatasetType.nameWithComponent(name, suffix)
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

    def testEquality(self):
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        parent = StorageClass("test")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=True),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=False),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "b",
                dimensionsA,
                storageA,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "b",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageB,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_b",
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsB,
                storageA,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsB,
                "test_a",
            ),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageA),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageB),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageA"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageB"),
        )

    def testCompatibility(self):
        storageA = StorageClass("test_a", pytype=set, converters={"list": "builtins.set"})
        storageB = StorageClass("test_b", pytype=list)
        storageC = StorageClass("test_c", pytype=dict)
        self.assertTrue(storageA.can_convert(storageB))
        dimensionsA = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensionsA, storageA)
        dA2 = DatasetType("a", dimensionsA, storageB)
        self.assertNotEqual(dA, dA2)
        self.assertTrue(dA.is_compatible_with(dA))
        self.assertTrue(dA.is_compatible_with(dA2))
        self.assertFalse(dA2.is_compatible_with(dA))
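        # Compatibility follows the storage class converters, so it is directional
        # here: storageA declares a converter from list, so dA can accept dA2's
        # storage class, but plain-list dA2 cannot accept dA's.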

        dA3 = DatasetType("a", dimensionsA, storageC)
        self.assertFalse(dA.is_compatible_with(dA3))

    def testOverrideStorageClass(self):
        storageA = StorageClass("test_a", pytype=list, converters={"dict": "builtins.list"})
        storageB = StorageClass("test_b", pytype=dict)
        dimensions = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensions, storageA)
        dB = dA.overrideStorageClass(storageB)
        self.assertNotEqual(dA, dB)
        self.assertEqual(dB.storageClass, storageB)

        round_trip = dB.overrideStorageClass(storageA)
        self.assertEqual(round_trip, dA)

        # Check that parents move over.
        parent = StorageClass("composite", components={"a": storageA, "c": storageA})
        dP = DatasetType("comp", dimensions, parent)
        dP_A = dP.makeComponentDatasetType("a")
        print(dP_A)
        dp_B = dP_A.overrideStorageClass(storageB)
        self.assertEqual(dp_B.storageClass, storageB)
        self.assertEqual(dp_B.parentStorageClass, parent)

    def testJson(self):
        storageA = StorageClass("test_a")
        dimensionsA = self.universe.extract(["instrument"])
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType.from_json(
                DatasetType(
                    "a",
                    dimensionsA,
                    storageA,
                ).to_json(),
                self.universe,
            ),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType.from_json(
                DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent").to_json(),
                self.universe,
            ),
        )

    def testSorting(self):
        """Can we sort a DatasetType"""
        storage = StorageClass("test_a")
        dimensions = self.universe.extract(["instrument"])

        d_a = DatasetType("a", dimensions, storage)
        d_f = DatasetType("f", dimensions, storage)
        d_p = DatasetType("p", dimensions, storage)

        sort = sorted([d_p, d_f, d_a])
        self.assertEqual(sort, [d_a, d_f, d_p])
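        # With identical dimensions and storage class, the order observed here is
        # determined by the dataset type name.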

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", d_p, "c", d_f, d_a, "d"])

    def testHashability(self):
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dimensions in [("instrument",), ("skymap",)]:
                    datasetType = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    datasetTypeCopy = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique
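        # 2 names x 2 storage classes x 2 dimension sets gives 8 unique combinations,
        # even though 16 instances were added to the list.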

        # also check that hashes of instances constructed with StorageClass
        # name match hashes of instances constructed with instances
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_d"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageD)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_d"))
        )

    def testDeepCopy(self):
        """Test that we can copy a dataset type."""
        storageClass = StorageClass("test_copy")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)

        # Now with calibration flag set
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)
        self.assertTrue(dcopy.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("copy_component")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        dcopy = copy.deepcopy(componentDatasetType)
        self.assertEqual(dcopy, componentDatasetType)

    def testPickle(self):
        """Test pickle support."""
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)
        self.assertIsNone(datasetTypeOut.parentStorageClass)
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertFalse(datasetTypeOut.isCalibration())

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertTrue(datasetTypeOut.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("pickle_component")
        StorageClassFactory().registerStorageClass(componentStorageClass)
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(componentDatasetType.name, datasetTypeOut.name)
        self.assertEqual(componentDatasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(componentDatasetType.storageClass, datasetTypeOut.storageClass)
        self.assertEqual(componentDatasetType.parentStorageClass, datasetTypeOut.parentStorageClass)
        self.assertEqual(datasetTypeOut.parentStorageClass.name, storageClass.name)
        self.assertEqual(datasetTypeOut, componentDatasetType)

        # Now with a string and not a real storage class to test that
        # pickling doesn't force the StorageClass to be resolved
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            "StrangeComponent",
            parentStorageClass="UnknownParent",
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

        # Now with a storage class that is created by the factory
        factoryStorageClassClass = StorageClassFactory.makeNewStorageClass("ParentClass")
        factoryComponentStorageClassClass = StorageClassFactory.makeNewStorageClass("ComponentClass")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            factoryComponentStorageClassClass(),
            parentStorageClass=factoryStorageClassClass(),
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

    def test_composites(self):
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")
        storageClass = StorageClass(
            "test_composite", components={"compA": storageClassA, "compB": storageClassB}
        )
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions, storageClass)
        datasetTypeComponentA = datasetTypeComposite.makeComponentDatasetType("compA")
        datasetTypeComponentB = datasetTypeComposite.makeComponentDatasetType("compB")

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(), ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(), ("composite", "compA"))

        self.assertEqual(datasetTypeComponentA.parentStorageClass, storageClass)
        self.assertEqual(datasetTypeComponentB.parentStorageClass, storageClass)
        self.assertIsNone(datasetTypeComposite.parentStorageClass)

        with self.assertRaises(KeyError):
            datasetTypeComposite.makeComponentDatasetType("compF")


class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef."""

    def setUp(self):
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        self.parentStorageClass = StorageClass(
            "Parent", components={"a": self.componentStorageClass1, "b": self.componentStorageClass2}
        )
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = dict(instrument="DummyCam", visit=42)
        self.datasetType = DatasetType(datasetTypeName, dimensions, self.parentStorageClass)
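        # The fixture above gives each test a composite "Parent" storage class
        # (components "a" and "b") and an instrument+visit data ID to work with.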

    def testConstructor(self):
        """Test that construction preserves and validates values."""
        # Construct an unresolved ref.
        ref = DatasetRef(self.datasetType, self.dataId)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)

        # Constructing a ref with an id but no run should fail.
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, id=uuid.uuid4())
        # Constructing an unresolved ref with run and/or components should
        # issue a ref with an id.
        run = "somerun"
        ref = DatasetRef(self.datasetType, self.dataId, run=run)
        self.assertIsNotNone(ref.id)

        # Passing a data ID that is missing dimensions should fail.
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, {"instrument": "DummyCam"})
        # Constructing a resolved ref should preserve run as well as everything
        # else.
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, id_)
        self.assertEqual(ref.run, run)
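
        # An unrecognized id_generation_mode should be rejected.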
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run, id_generation_mode=42)

    def testSorting(self):
        """Can we sort a DatasetRef"""
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=1))
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10))
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=22))

        # Enable detailed diff report
        self.maxDiff = None

        # This will sort them on visit number
        sort = sorted([ref3, ref1, ref2])
        self.assertEqual(sort, [ref1, ref2, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now include a run
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=43), run="b")
        self.assertEqual(ref1.run, "b")
        ref4 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10), run="b")
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=4), run="a")
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=104), run="c")

        # This will sort them on run before visit
        sort = sorted([ref3, ref1, ref2, ref4])
        self.assertEqual(sort, [ref2, ref4, ref1, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", ref1, "c"])

    def testResolving(self):
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run="somerun")
        unresolvedRef = ref.unresolved()
        self.assertIsNotNone(ref.id)
        self.assertIsNone(unresolvedRef.id)
        self.assertIsNone(unresolvedRef.run)
        self.assertNotEqual(ref, unresolvedRef)
        self.assertEqual(ref.unresolved(), unresolvedRef)
        self.assertEqual(ref.datasetType, unresolvedRef.datasetType)
        self.assertEqual(ref.dataId, unresolvedRef.dataId)
        reresolvedRef = unresolvedRef.resolved(id=id_, run="somerun")
        self.assertEqual(ref, reresolvedRef)
        self.assertEqual(reresolvedRef.unresolved(), unresolvedRef)
        self.assertIsNotNone(reresolvedRef.run)

        other_resolved = DatasetIdFactory().resolveRef(unresolvedRef, "somerun")
        self.assertEqual(other_resolved.run, "somerun")

    def testOverrideStorageClass(self):
        storageA = StorageClass("test_a", pytype=list)

        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")

        ref_new = ref.overrideStorageClass(storageA)
        self.assertNotEqual(ref, ref_new)
        self.assertEqual(ref_new.datasetType.storageClass, storageA)
        self.assertEqual(ref_new.overrideStorageClass(ref.datasetType.storageClass), ref)

        incompatible_sc = StorageClass("my_int", pytype=int)
        with self.assertRaises(ValueError):
            # Do not test against "ref" because it has a default storage class
            # of "object" which is compatible with everything.
            ref_new.overrideStorageClass(incompatible_sc)

    def testPickle(self):
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)

    def testJson(self):
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = ref.to_json()
        self.assertEqual(DatasetRef.from_json(s, universe=self.universe), ref)


if __name__ == "__main__":
    unittest.main()