# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import copy
import pickle
import unittest
import uuid

from lsst.daf.butler import (
    DataCoordinate,
    DatasetIdFactory,
    DatasetRef,
    DatasetType,
    DimensionUniverse,
    FileDataset,
    StorageClass,
    StorageClassFactory,
)

"""Tests for datasets module.
"""


class DatasetTypeTestCase(unittest.TestCase):
    """Test for DatasetType."""

    def setUp(self):
        self.universe = DimensionUniverse()

    def testConstructor(self):
        """Test construction preserves values.

        Note that construction doesn't check for valid storageClass.
        This can only be verified for a particular schema.
        """
        datasetTypeName = "test"
        storageClass = StorageClass("test_StructuredData")
        dimensions = self.universe.extract(("visit", "instrument"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

        with self.assertRaises(ValueError, msg="Construct component without parent storage class"):
            DatasetType(DatasetType.nameWithComponent(datasetTypeName, "comp"), dimensions, storageClass)
        with self.assertRaises(ValueError, msg="Construct non-component with parent storage class"):
            DatasetType(datasetTypeName, dimensions, storageClass, parentStorageClass="NotAllowed")

    def testConstructor2(self):
        """Test construction from StorageClass name."""
        datasetTypeName = "test"
        storageClass = StorageClass("test_constructor2")
        StorageClassFactory().registerStorageClass(storageClass)
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, "test_constructor2")
        self.assertEqual(datasetType.name, datasetTypeName)
        self.assertEqual(datasetType.storageClass, storageClass)
        self.assertEqual(datasetType.dimensions, dimensions)

    def testNameValidation(self):
        """Test that dataset type names only contain certain characters
        in certain positions.
        """
        dimensions = self.universe.extract(("instrument", "visit"))
        goodNames = ("a", "A", "z1", "Z1", "a_1B", "A_1b", "_a")
        badNames = ("1", "a%b", "B+Z", "T[0]")

        # Construct storage class with all the good names included as
        # components so that we can test internal consistency
        storageClass = StorageClass(
            "test_StructuredData", components={n: StorageClass("component") for n in goodNames}
        )

        for name in goodNames:
            composite = DatasetType(name, dimensions, storageClass)
            self.assertEqual(composite.name, name)
            for suffix in goodNames:
                full = DatasetType.nameWithComponent(name, suffix)
                component = composite.makeComponentDatasetType(suffix)
                self.assertEqual(component.name, full)
                self.assertEqual(component.parentStorageClass.name, "test_StructuredData")
            for suffix in badNames:
                full = DatasetType.nameWithComponent(name, suffix)
                with self.subTest(full=full):
                    with self.assertRaises(ValueError):
                        DatasetType(full, dimensions, storageClass)
        for name in badNames:
            with self.subTest(name=name):
                with self.assertRaises(ValueError):
                    DatasetType(name, dimensions, storageClass)

    def testEquality(self):
        storageA = StorageClass("test_a")
        storageB = StorageClass("test_b")
        parent = StorageClass("test")
        dimensionsA = self.universe.extract(["instrument"])
        dimensionsB = self.universe.extract(["skymap"])
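        # The checks below assume that DatasetTypes compare equal whether the
        # storage class is given as an instance or referenced by its name.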
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=parent),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=True),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent", isCalibration=False),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "b",
                dimensionsA,
                storageA,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "b",
                dimensionsA,
                "test_a",
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                storageB,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsA,
                "test_b",
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsB,
                storageA,
            ),
        )
        self.assertNotEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType(
                "a",
                dimensionsB,
                "test_a",
            ),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageA),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass=storageB),
        )
        self.assertNotEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageA"),
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="storageB"),
        )

    def testCompatibility(self):
        storageA = StorageClass("test_a", pytype=set, converters={"list": "builtins.set"})
        storageB = StorageClass("test_b", pytype=list)
        storageC = StorageClass("test_c", pytype=dict)
        self.assertTrue(storageA.can_convert(storageB))
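        # test_a can convert from list (the pytype of test_b), so the
        # compatibility checks below are expected to hold in that direction
        # only; test_c declares no converters and is compatible with neither.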
        dimensionsA = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensionsA, storageA)
        dA2 = DatasetType("a", dimensionsA, storageB)
        self.assertNotEqual(dA, dA2)
        self.assertTrue(dA.is_compatible_with(dA))
        self.assertTrue(dA.is_compatible_with(dA2))
        self.assertFalse(dA2.is_compatible_with(dA))

        dA3 = DatasetType("a", dimensionsA, storageC)
        self.assertFalse(dA.is_compatible_with(dA3))

    def testOverrideStorageClass(self):
        storageA = StorageClass("test_a", pytype=list, converters={"dict": "builtins.list"})
        storageB = StorageClass("test_b", pytype=dict)
        dimensions = self.universe.extract(["instrument"])

        dA = DatasetType("a", dimensions, storageA)
        dB = dA.overrideStorageClass(storageB)
        self.assertNotEqual(dA, dB)
        self.assertEqual(dB.storageClass, storageB)

        round_trip = dB.overrideStorageClass(storageA)
        self.assertEqual(round_trip, dA)

        # Check that parents move over.
        parent = StorageClass("composite", components={"a": storageA, "c": storageA})
        dP = DatasetType("comp", dimensions, parent)
        dP_A = dP.makeComponentDatasetType("a")
        print(dP_A)
        dp_B = dP_A.overrideStorageClass(storageB)
        self.assertEqual(dp_B.storageClass, storageB)
        self.assertEqual(dp_B.parentStorageClass, parent)

    def testJson(self):
        storageA = StorageClass("test_a")
        dimensionsA = self.universe.extract(["instrument"])
        self.assertEqual(
            DatasetType(
                "a",
                dimensionsA,
                storageA,
            ),
            DatasetType.from_json(
                DatasetType(
                    "a",
                    dimensionsA,
                    storageA,
                ).to_json(),
                self.universe,
            ),
        )
        self.assertEqual(
            DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent"),
            DatasetType.from_json(
                DatasetType("a.b", dimensionsA, "test_b", parentStorageClass="parent").to_json(),
                self.universe,
            ),
        )

    def testSorting(self):
        """Can we sort a DatasetType?"""
        storage = StorageClass("test_a")
        dimensions = self.universe.extract(["instrument"])

        d_a = DatasetType("a", dimensions, storage)
        d_f = DatasetType("f", dimensions, storage)
        d_p = DatasetType("p", dimensions, storage)

        sort = sorted([d_p, d_f, d_a])
        self.assertEqual(sort, [d_a, d_f, d_p])

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", d_p, "c", d_f, d_a, "d"])

    def testHashability(self):
        """Test `DatasetType.__hash__`.

        This test is performed by checking that `DatasetType` entries can
        be inserted into a `set` and that unique values of its
        (`name`, `storageClass`, `dimensions`) parameters result in separate
        entries (and equal ones don't).

        This does not check for uniformity of hashing or the actual values
        of the hash function.
        """
        types = []
        unique = 0
        storageC = StorageClass("test_c")
        storageD = StorageClass("test_d")
        for name in ["a", "b"]:
            for storageClass in [storageC, storageD]:
                for dimensions in [("instrument",), ("skymap",)]:
                    datasetType = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    datasetTypeCopy = DatasetType(name, self.universe.extract(dimensions), storageClass)
                    types.extend((datasetType, datasetTypeCopy))
                    unique += 1  # datasetType should always equal its copy
        self.assertEqual(len(set(types)), unique)  # all other combinations are unique

        # also check that hashes of instances constructed with StorageClass
        # name match hashes of instances constructed with instances
        dimensions = self.universe.extract(["instrument"])
        self.assertEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageC)), hash(DatasetType("a", dimensions, "test_d"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, storageD)), hash(DatasetType("a", dimensions, "test_c"))
        )
        self.assertNotEqual(
            hash(DatasetType("a", dimensions, "test_c")), hash(DatasetType("a", dimensions, "test_d"))
        )

    def testDeepCopy(self):
        """Test that we can copy a dataset type."""
        storageClass = StorageClass("test_copy")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)

        # Now with calibration flag set
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        dcopy = copy.deepcopy(datasetType)
        self.assertEqual(dcopy, datasetType)
        self.assertTrue(dcopy.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("copy_component")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        dcopy = copy.deepcopy(componentDatasetType)
        self.assertEqual(dcopy, componentDatasetType)

    def testPickle(self):
        """Test pickle support."""
        storageClass = StorageClass("test_pickle")
        datasetTypeName = "test"
        dimensions = self.universe.extract(("instrument", "visit"))
        # Un-pickling requires that storage class is registered with factory.
        StorageClassFactory().registerStorageClass(storageClass)
        datasetType = DatasetType(datasetTypeName, dimensions, storageClass)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(datasetType.name, datasetTypeOut.name)
        self.assertEqual(datasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(datasetType.storageClass, datasetTypeOut.storageClass)
        self.assertIsNone(datasetTypeOut.parentStorageClass)
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertFalse(datasetTypeOut.isCalibration())

        datasetType = DatasetType(datasetTypeName, dimensions, storageClass, isCalibration=True)
        datasetTypeOut = pickle.loads(pickle.dumps(datasetType))
        self.assertIs(datasetType.isCalibration(), datasetTypeOut.isCalibration())
        self.assertTrue(datasetTypeOut.isCalibration())

        # And again with a composite
        componentStorageClass = StorageClass("pickle_component")
        StorageClassFactory().registerStorageClass(componentStorageClass)
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            componentStorageClass,
            parentStorageClass=storageClass,
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertIsInstance(datasetTypeOut, DatasetType)
        self.assertEqual(componentDatasetType.name, datasetTypeOut.name)
        self.assertEqual(componentDatasetType.dimensions.names, datasetTypeOut.dimensions.names)
        self.assertEqual(componentDatasetType.storageClass, datasetTypeOut.storageClass)
        self.assertEqual(componentDatasetType.parentStorageClass, datasetTypeOut.parentStorageClass)
        self.assertEqual(datasetTypeOut.parentStorageClass.name, storageClass.name)
        self.assertEqual(datasetTypeOut, componentDatasetType)

        # Now with a string and not a real storage class to test that
        # pickling doesn't force the StorageClass to be resolved
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            "StrangeComponent",
            parentStorageClass="UnknownParent",
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

        # Now with a storage class that is created by the factory
        factoryStorageClassClass = StorageClassFactory.makeNewStorageClass("ParentClass")
        factoryComponentStorageClassClass = StorageClassFactory.makeNewStorageClass("ComponentClass")
        componentDatasetType = DatasetType(
            DatasetType.nameWithComponent(datasetTypeName, "comp"),
            dimensions,
            factoryComponentStorageClassClass(),
            parentStorageClass=factoryStorageClassClass(),
        )
        datasetTypeOut = pickle.loads(pickle.dumps(componentDatasetType))
        self.assertEqual(datasetTypeOut, componentDatasetType)
        self.assertEqual(datasetTypeOut._parentStorageClassName, componentDatasetType._parentStorageClassName)

    def test_composites(self):
        """Test components within composite DatasetTypes."""
        storageClassA = StorageClass("compA")
        storageClassB = StorageClass("compB")
        storageClass = StorageClass(
            "test_composite", components={"compA": storageClassA, "compB": storageClassB}
        )
        self.assertTrue(storageClass.isComposite())
        self.assertFalse(storageClassA.isComposite())
        self.assertFalse(storageClassB.isComposite())

        dimensions = self.universe.extract(("instrument", "visit"))

        datasetTypeComposite = DatasetType("composite", dimensions, storageClass)
        datasetTypeComponentA = datasetTypeComposite.makeComponentDatasetType("compA")
        datasetTypeComponentB = datasetTypeComposite.makeComponentDatasetType("compB")

        self.assertTrue(datasetTypeComposite.isComposite())
        self.assertFalse(datasetTypeComponentA.isComposite())
        self.assertTrue(datasetTypeComponentB.isComponent())
        self.assertFalse(datasetTypeComposite.isComponent())

        self.assertEqual(datasetTypeComposite.name, "composite")
        self.assertEqual(datasetTypeComponentA.name, "composite.compA")
        self.assertEqual(datasetTypeComponentB.component(), "compB")
        self.assertEqual(datasetTypeComposite.nameAndComponent(), ("composite", None))
        self.assertEqual(datasetTypeComponentA.nameAndComponent(), ("composite", "compA"))

        self.assertEqual(datasetTypeComponentA.parentStorageClass, storageClass)
        self.assertEqual(datasetTypeComponentB.parentStorageClass, storageClass)
        self.assertIsNone(datasetTypeComposite.parentStorageClass)

        with self.assertRaises(KeyError):
            datasetTypeComposite.makeComponentDatasetType("compF")


class DatasetRefTestCase(unittest.TestCase):
    """Test for DatasetRef."""

    def setUp(self):
        self.universe = DimensionUniverse()
        datasetTypeName = "test"
        self.componentStorageClass1 = StorageClass("Component1")
        self.componentStorageClass2 = StorageClass("Component2")
        self.parentStorageClass = StorageClass(
            "Parent", components={"a": self.componentStorageClass1, "b": self.componentStorageClass2}
        )
        dimensions = self.universe.extract(("instrument", "visit"))
        self.dataId = dict(instrument="DummyCam", visit=42)
        self.datasetType = DatasetType(datasetTypeName, dimensions, self.parentStorageClass)

    def testConstructor(self):
        """Test that construction preserves and validates values."""
        # Construct an unresolved ref.
        ref = DatasetRef(self.datasetType, self.dataId)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)

        # Constructing a ref with an id but no run should fail.
        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, id=uuid.uuid4())
        # Constructing an unresolved ref with run and/or components should
        # issue a ref with an id.
        run = "somerun"
        ref = DatasetRef(self.datasetType, self.dataId, run=run)
        self.assertIsNotNone(ref.id)

        # Passing a data ID that is missing dimensions should fail.
        with self.assertRaises(KeyError):
            DatasetRef(self.datasetType, {"instrument": "DummyCam"})
        # Constructing a resolved ref should preserve run as well as everything
        # else.
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run=run)
        self.assertEqual(ref.datasetType, self.datasetType)
        self.assertEqual(
            ref.dataId, DataCoordinate.standardize(self.dataId, universe=self.universe), msg=ref.dataId
        )
        self.assertIsInstance(ref.dataId, DataCoordinate)
        self.assertEqual(ref.id, id_)
        self.assertEqual(ref.run, run)

        with self.assertRaises(ValueError):
            DatasetRef(self.datasetType, self.dataId, run=run, id_generation_mode=42)

    def testSorting(self):
        """Can we sort a DatasetRef?"""
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=1))
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10))
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=22))

        # Enable detailed diff report
        self.maxDiff = None

        # This will sort them on visit number
        sort = sorted([ref3, ref1, ref2])
        self.assertEqual(sort, [ref1, ref2, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now include a run
        ref1 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=43), run="b")
        self.assertEqual(ref1.run, "b")
        ref4 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=10), run="b")
        ref2 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=4), run="a")
        ref3 = DatasetRef(self.datasetType, dict(instrument="DummyCam", visit=104), run="c")

        # This will sort them on run before visit
        sort = sorted([ref3, ref1, ref2, ref4])
        self.assertEqual(sort, [ref2, ref4, ref1, ref3], msg=f"Got order: {[r.dataId for r in sort]}")

        # Now with strings
        with self.assertRaises(TypeError):
            sort = sorted(["z", ref1, "c"])

    def testResolving(self):
        id_ = uuid.uuid4()
        ref = DatasetRef(self.datasetType, self.dataId, id=id_, run="somerun")
        unresolvedRef = ref.unresolved()
        self.assertIsNotNone(ref.id)
        self.assertIsNone(unresolvedRef.id)
        self.assertIsNone(unresolvedRef.run)
        self.assertNotEqual(ref, unresolvedRef)
        self.assertEqual(ref.unresolved(), unresolvedRef)
        self.assertEqual(ref.datasetType, unresolvedRef.datasetType)
        self.assertEqual(ref.dataId, unresolvedRef.dataId)
        reresolvedRef = unresolvedRef.resolved(id=id_, run="somerun")
        self.assertEqual(ref, reresolvedRef)
        self.assertEqual(reresolvedRef.unresolved(), unresolvedRef)
        self.assertIsNotNone(reresolvedRef.run)

        other_resolved = DatasetIdFactory().resolveRef(unresolvedRef, "somerun")
        self.assertEqual(other_resolved.run, "somerun")

    def testOverrideStorageClass(self):
        storageA = StorageClass("test_a", pytype=list)

        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")

        ref_new = ref.overrideStorageClass(storageA)
        self.assertNotEqual(ref, ref_new)
        self.assertEqual(ref_new.datasetType.storageClass, storageA)
        self.assertEqual(ref_new.overrideStorageClass(ref.datasetType.storageClass), ref)

        incompatible_sc = StorageClass("my_int", pytype=int)
        with self.assertRaises(ValueError):
            # Do not test against "ref" because it has a default storage class
            # of "object" which is compatible with everything.
            ref_new.overrideStorageClass(incompatible_sc)

    def testPickle(self):
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = pickle.dumps(ref)
        self.assertEqual(pickle.loads(s), ref)

    def testJson(self):
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        s = ref.to_json()
        self.assertEqual(DatasetRef.from_json(s, universe=self.universe), ref)

    def testFileDataset(self):
        ref = DatasetRef(self.datasetType, self.dataId, run="somerun")
        file_dataset = FileDataset(path="something.yaml", refs=ref)
        self.assertEqual(file_dataset.refs, [ref])
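
        # Refs grouped into one FileDataset are expected to share the same
        # run, so mixing "somerun" and "somerun2" below should raise.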
        ref2 = DatasetRef(self.datasetType, self.dataId, run="somerun2")
        with self.assertRaises(ValueError):
            FileDataset(path="other.yaml", refs=[ref, ref2])


if __name__ == "__main__":
    unittest.main()