Coverage for python/lsst/daf/butler/tests/utils.py: 28%


# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (http://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations

__all__ = ()

import io
import os
import shutil
import tempfile
from contextlib import contextmanager
from typing import Iterator, Optional

import astropy.time
from astropy.table import Table as AstropyTable
from astropy.utils.diff import report_diff_values

from .. import Butler, Config, StorageClassFactory
from ..registry import CollectionType
from ..tests import MetricsExample, addDatasetType

def makeTestTempDir(default_base: str) -> str:
    """Create a temporary directory for test usage.

    The directory will be created within ``DAF_BUTLER_TEST_TMP`` if that
    environment variable is set, falling back to ``default_base`` if it is
    not.

    Parameters
    ----------
    default_base : `str`
        Default parent directory.

    Returns
    -------
    dir : `str`
        Name of the new temporary directory.
    """
    base = os.environ.get("DAF_BUTLER_TEST_TMP", default_base)
    return tempfile.mkdtemp(dir=base)


def removeTestTempDir(root: Optional[str]) -> None:
    """Attempt to remove a temporary test directory, but do not raise if
    unable to.

    Unlike `tempfile.TemporaryDirectory`, this passes ``ignore_errors=True``
    to ``shutil.rmtree`` at close, making it safe to use on NFS.

    Parameters
    ----------
    root : `str`, optional
        Name of the directory to be removed. If `None`, nothing will be done.
    """
    if root is not None and os.path.exists(root):
        shutil.rmtree(root, ignore_errors=True)
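# Usage sketch (illustrative, not part of the original module): the two
# helpers above are intended to be paired, e.g. in a test fixture's
# setUp/tearDown. Creation honours the DAF_BUTLER_TEST_TMP environment
# variable; cleanup never raises. The function name and file contents here
# are hypothetical.
def _exampleTempDirLifecycle(default_base: str) -> None:
    root = makeTestTempDir(default_base)
    try:
        # Use the directory for scratch files during a test.
        with open(os.path.join(root, "scratch.txt"), "w") as f:
            f.write("scratch data\n")
    finally:
        removeTestTempDir(root)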

@contextmanager
def safeTestTempDir(default_base: str) -> Iterator[str]:
    """Return a context manager that creates a temporary directory and then
    attempts to remove it.

    Parameters
    ----------
    default_base : `str`
        Default parent directory, forwarded to `makeTestTempDir`.

    Returns
    -------
    context : `contextlib.ContextManager`
        A context manager that returns the new directory name on ``__enter__``
        and removes the temporary directory (via `removeTestTempDir`) on
        ``__exit__``.
    """
    root = makeTestTempDir(default_base)
    try:
        yield root
    finally:
        removeTestTempDir(root)
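# Usage sketch (illustrative, not part of the original module): the context
# manager bundles the create/remove pair from the previous example into a
# single ``with`` statement. The function name is hypothetical.
def _exampleSafeTempDirUsage(default_base: str) -> None:
    with safeTestTempDir(default_base) as root:
        # ``root`` exists inside the block and is removed (best effort,
        # ignoring errors) when the block exits.
        with open(os.path.join(root, "scratch.txt"), "w") as f:
            f.write("scratch data\n")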

class ButlerTestHelper:
    """Mixin with helpers for unit tests."""

    def assertAstropyTablesEqual(self, tables, expectedTables, filterColumns=False, unorderedRows=False):
        """Verify that a list of astropy tables matches a list of expected
        astropy tables.

        Parameters
        ----------
        tables : `astropy.table.Table` or iterable [`astropy.table.Table`]
            The table or tables that should match the expected tables.
        expectedTables : `astropy.table.Table` or iterable [`astropy.table.Table`]
            The tables with expected values to which the tables under test
            will be compared.
        filterColumns : `bool`
            If `True` then only compare columns that exist in
            ``expectedTables``.
        unorderedRows : `bool`, optional
            If `True` (`False` is default), don't require tables to have their
            rows in the same order.
        """
        # If a single table is passed in for tables or expectedTables, put it
        # in a list.
        if isinstance(tables, AstropyTable):
            tables = [tables]
        if isinstance(expectedTables, AstropyTable):
            expectedTables = [expectedTables]
        diff = io.StringIO()
        self.assertEqual(len(tables), len(expectedTables))
        for table, expected in zip(tables, expectedTables):
            # Assert that we are testing what we think we are testing:
            self.assertIsInstance(table, AstropyTable)
            self.assertIsInstance(expected, AstropyTable)
            if filterColumns:
                table = table.copy()
                table.keep_columns(expected.colnames)
            if unorderedRows:
                table = table.copy()
                table.sort(table.colnames)
                expected = expected.copy()
                expected.sort(expected.colnames)
            # Assert that they match:
            self.assertTrue(report_diff_values(table, expected, fileobj=diff), msg="\n" + diff.getvalue())
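# Usage sketch (illustrative, not part of the original module): the mixin is
# combined with `unittest.TestCase` so that `assertAstropyTablesEqual` can
# delegate to the standard assertion methods. The test case and the table
# contents below are hypothetical.
import unittest


class _ExampleTableTestCase(ButlerTestHelper, unittest.TestCase):
    def test_tablesMatch(self):
        observed = AstropyTable([[2, 1], ["b", "a"]], names=("id", "name"))
        expected = AstropyTable([[1, 2], ["a", "b"]], names=("id", "name"))
        # Rows differ only in order, so compare with unorderedRows=True.
        self.assertAstropyTablesEqual(observed, expected, unorderedRows=True)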

def readTable(textTable):
    """Read an astropy table from formatted text.

    The table is read with fill-value settings that cause missing/unpopulated
    values to appear as empty strings instead of "--".

    Parameters
    ----------
    textTable : `str`
        The text version of the table to read.

    Returns
    -------
    table : `astropy.table.Table`
        The table as an astropy table.
    """
    return AstropyTable.read(
        textTable,
        format="ascii",
        data_start=2,  # skip the header row and the header row underlines.
        fill_values=[("", 0, "")],
    )
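# Usage sketch (illustrative, not part of the original module): parsing a
# small fixed-format text table whose header row and underline row are
# skipped via ``data_start=2``. The table contents and function name are
# hypothetical.
def _exampleReadTable() -> AstropyTable:
    text = "\n".join(
        (
            "datasetType run   id",
            "----------- ----- --",
            "metric      run1   1",
            "metric      run2   2",
        )
    )
    # Returns an astropy table with columns datasetType, run and id.
    return readTable(text)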

class MetricTestRepo:
    """Creates and manages a test repository on disk with datasets that
    may be queried and modified for unit tests.

    Parameters
    ----------
    root : `str`
        The location of the repository, to pass to ``Butler.makeRepo``.
    configFile : `str`
        The path to the config file, to pass to ``Butler.makeRepo``.
    """

    @staticmethod
    def _makeExampleMetrics():
        """Make an object to put into the repository."""
        return MetricsExample(
            {"AM1": 5.2, "AM2": 30.6},
            {"a": [1, 2, 3], "b": {"blue": 5, "red": "green"}},
            [563, 234, 456.7, 752, 8, 9, 27],
        )

    @staticmethod
    def _makeDimensionData(id, name, datetimeBegin=None, datetimeEnd=None):
        """Make a dict of dimensional data with default values to insert into
        the registry.
        """
        data = dict(instrument="DummyCamComp", id=id, name=name, physical_filter="d-r", visit_system=1)
        if datetimeBegin:
            data["datetime_begin"] = datetimeBegin
            data["datetime_end"] = datetimeEnd
        return data

    def __init__(self, root, configFile):
        self.root = root
        Butler.makeRepo(self.root, config=Config(configFile))
        butlerConfigFile = os.path.join(self.root, "butler.yaml")
        self.storageClassFactory = StorageClassFactory()
        self.storageClassFactory.addFromConfig(butlerConfigFile)

        # New datasets will be added to run and tag, but we will only look in
        # tag when looking up datasets.
        run = "ingest/run"
        tag = "ingest"
        self.butler = Butler(butlerConfigFile, run=run, collections=[tag])
        self.butler.registry.registerCollection(tag, CollectionType.TAGGED)

        # Create and register a DatasetType
        self.datasetType = addDatasetType(
            self.butler, "test_metric_comp", ("instrument", "visit"), "StructuredCompositeReadComp"
        )

        # Add needed Dimensions
        self.butler.registry.insertDimensionData("instrument", {"name": "DummyCamComp"})
        self.butler.registry.insertDimensionData(
            "physical_filter", {"instrument": "DummyCamComp", "name": "d-r", "band": "R"}
        )
        self.butler.registry.insertDimensionData(
            "visit_system", {"instrument": "DummyCamComp", "id": 1, "name": "default"}
        )
        visitStart = astropy.time.Time("2020-01-01 08:00:00.123456789", scale="tai")
        visitEnd = astropy.time.Time("2020-01-01 08:00:36.66", scale="tai")
        self.butler.registry.insertDimensionData(
            "visit",
            dict(
                instrument="DummyCamComp",
                id=423,
                name="fourtwentythree",
                physical_filter="d-r",
                datetime_begin=visitStart,
                datetime_end=visitEnd,
            ),
        )
        self.butler.registry.insertDimensionData(
            "visit",
            dict(
                instrument="DummyCamComp",
                id=424,
                name="fourtwentyfour",
                physical_filter="d-r",
            ),
        )

        self.addDataset({"instrument": "DummyCamComp", "visit": 423})
        self.addDataset({"instrument": "DummyCamComp", "visit": 424})

    def addDataset(self, dataId, run=None, datasetType=None):
        """Create a new example metric and add it to the named run with the
        given dataId.

        Overwrites tags, so this does not try to associate the new dataset
        with existing tags. (If/when tags are needed this can be added to the
        arguments of this function.)

        Parameters
        ----------
        dataId : `dict`
            The dataId for the new metric.
        run : `str`, optional
            The name of the run to create and add a dataset to. If `None`, the
            dataset will be added to the root butler.
        datasetType : ``DatasetType``, optional
            The dataset type of the added dataset. If `None`, will use the
            default dataset type.
        """
        if run:
            self.butler.registry.registerCollection(run, type=CollectionType.RUN)
        metric = self._makeExampleMetrics()
        self.butler.put(metric, self.datasetType if datasetType is None else datasetType, dataId, run=run)
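

# Usage sketch (illustrative, not part of the original module): building a
# throw-away metrics repository inside a test. ``configFile`` would point at
# a butler seed config shipped with the test data; the function name and the
# run name "ingest/run2" are hypothetical.
def _exampleMetricTestRepo(default_base: str, configFile: str) -> MetricTestRepo:
    root = makeTestTempDir(default_base)
    repo = MetricTestRepo(root, configFile=configFile)
    # The constructor already registered visits 423 and 424 and put one
    # dataset for each; add another copy of visit 423 to a new RUN collection.
    repo.addDataset({"instrument": "DummyCamComp", "visit": 423}, run="ingest/run2")
    return repo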