Coverage for python/lsst/daf/butler/tests/_datasetsHelper.py: 33%

56 statements  

« prev     ^ index     » next       coverage.py v6.4.4, created at 2022-08-26 02:22 -0700

1# This file is part of daf_butler. 

2# 

3# Developed for the LSST Data Management System. 

4# This product includes software developed by the LSST Project 

5# (http://www.lsst.org). 

6# See the COPYRIGHT file at the top-level directory of this distribution 

7# for details of code ownership. 

8# 

9# This program is free software: you can redistribute it and/or modify 

10# it under the terms of the GNU General Public License as published by 

11# the Free Software Foundation, either version 3 of the License, or 

12# (at your option) any later version. 

13# 

14# This program is distributed in the hope that it will be useful, 

15# but WITHOUT ANY WARRANTY; without even the implied warranty of 

16# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 

17# GNU General Public License for more details. 

18# 

19# You should have received a copy of the GNU General Public License 

20# along with this program. If not, see <http://www.gnu.org/licenses/>. 

21 

# Public API of this test-helper module.
__all__ = (
    "DatasetTestHelper",
    "DatastoreTestHelper",
    "BadWriteFormatter",
    "BadNoWriteFormatter",
    "MultiDetectorFormatter",
)

29 

30import os 

31 

32from lsst.daf.butler import DatasetRef, DatasetType, StorageClass 

33from lsst.daf.butler.formatters.yaml import YamlFormatter 

34 

35 

class DatasetTestHelper:
    """Helper methods for Datasets"""

    def makeDatasetRef(
        self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None, conform=True
    ):
        """Make a DatasetType and wrap it in a DatasetRef for a test"""
        return self._makeDatasetRef(
            datasetTypeName, dimensions, storageClass, dataId, id=id, run=run, conform=conform
        )

    def _makeDatasetRef(
        self, datasetTypeName, dimensions, storageClass, dataId, *, id=None, run=None, conform=True
    ):
        # Implementation shared by makeDatasetRef.

        # A composite-looking name ("a.b") names a component; pretend there
        # is a parent by supplying a parent storage class for that case.
        _, componentName = DatasetType.splitDatasetTypeName(datasetTypeName)
        parentStorageClass = StorageClass("component") if componentName else None

        datasetType = DatasetType(
            datasetTypeName, dimensions, storageClass, parentStorageClass=parentStorageClass
        )

        if id is None:
            # No explicit ID supplied: hand out the next value from the
            # running counter (self.id is maintained by the test harness).
            self.id += 1
            id = self.id
        run = "dummy" if run is None else run
        return DatasetRef(datasetType, dataId, id=id, run=run, conform=conform)

65 

66 

class DatastoreTestHelper:
    """Helper methods for Datastore tests"""

    def setUpDatastoreTests(self, registryClass, configClass):
        """Shared setUp code for all Datastore tests"""
        self.registry = registryClass()

        # There is no butler in these tests, so dataset IDs are tracked
        # locally via this counter.
        self.id = 1

        self.config = configClass(self.configFile)

        # Subclasses may override the working root directory.
        if self.root is not None:
            self.datastoreType.setConfigRoot(self.root, self.config, self.config.copy())

    def makeDatastore(self, sub=None):
        """Make a new Datastore instance of the appropriate type.

        Parameters
        ----------
        sub : str, optional
            If not None, the returned Datastore will be distinct from any
            Datastore constructed with a different value of ``sub``.  For
            PosixDatastore, for example, the converse is also true, and
            ``sub`` is used as a subdirectory to form the new root.

        Returns
        -------
        datastore : `Datastore`
            Datastore constructed by this routine using the supplied
            optional subdirectory if supported.
        """
        config = self.config.copy()
        if sub is not None and self.root is not None:
            self.datastoreType.setConfigRoot(os.path.join(self.root, sub), config, self.config)

        if sub is None:
            registry = self.registry
        else:
            # Each sub-datastore gets its own fresh registry instance.
            registry = type(self.registry)()
        return self.datastoreType(config=config, bridgeManager=registry.getDatastoreBridgeManager())

111 

112 

class BadWriteFormatter(YamlFormatter):
    """A formatter that never works but does leave a file behind."""

    def _readFile(self, path, pytype=None):
        # Reading is unconditionally unsupported by this formatter.
        raise NotImplementedError("This formatter can not read anything")

    def _writeFile(self, inMemoryDataset):
        """Write an empty file and then raise an exception."""
        path = self.fileDescriptor.location.path
        # Create (or truncate) the target so an empty file is left on disk,
        # then fail: tests use this to exercise cleanup of partial writes.
        with open(path, "wb"):
            pass
        raise RuntimeError("Did not succeed in writing file")

124 

125 

class BadNoWriteFormatter(BadWriteFormatter):
    """A formatter that always fails without writing anything."""

    def _writeFile(self, inMemoryDataset):
        # Fail before touching the filesystem, unlike the parent class.
        # Fixed grammar of the error message ("Did not writing" -> "Did not
        # write"); tests should not match on the broken wording.
        raise RuntimeError("Did not write anything at all")

131 

132 

class MultiDetectorFormatter(YamlFormatter):
    """YAML formatter that returns the single per-detector entry named by
    the formatter's dataId, converted with the requested python type."""

    def _writeFile(self, inMemoryDataset):
        # Writing is deliberately unsupported.
        raise NotImplementedError("Can not write")

    def _fromBytes(self, serializedDataset, pytype=None):
        data = super()._fromBytes(serializedDataset)

        # A dataId with a detector entry is mandatory to select the record.
        if self.dataId is None:
            raise RuntimeError("This formatter requires a dataId")
        if "detector" not in self.dataId:
            raise RuntimeError("This formatter requires detector to be present in dataId")

        key = f"detector{self.dataId['detector']}"
        if key not in data:
            raise RuntimeError(f"Could not find '{key}' in data file")
        # NOTE(review): assumes callers always supply pytype — pytype(None)
        # would raise TypeError here if they do not; confirm against callers.
        return pytype(data[key])