Coverage for tests/test_exprParserLex.py: 10% (182 statements)

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Simple unit test for expr_parser/parserLex module."""

import re
import unittest

from lsst.daf.butler.registry.queries.expressions import ParserLex, ParserLexError
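
# A minimal driving sketch of how these tests use the lexer (assuming the
# usual ply-style protocol that make_lexer()/input()/token() suggest): feed
# text with input() and pull tokens with token() until it returns None.
#
#     lexer = ParserLex.make_lexer()
#     lexer.input("visit = 42")
#     while (tok := lexer.token()) is not None:
#         print(tok.type, tok.value)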


class ParserLexTestCase(unittest.TestCase):
    """A test case for ParserLex."""

    def _assertToken(self, token, type, value, lineno=None, lexpos=None):
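        """Check token type and value and, if given, its lineno/lexpos."""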
        self.assertIsNotNone(token)
        self.assertEqual(token.type, type)
        self.assertEqual(token.value, value)
        if lineno is not None:
            self.assertEqual(token.lineno, lineno)
        if lexpos is not None:
            self.assertEqual(token.lexpos, lexpos)

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testInstantiate(self):
        """Tests for making ParserLex instances."""

        default_reflags = re.IGNORECASE | re.VERBOSE
        lexer = ParserLex.make_lexer()
        self.assertEqual(lexer.lexreflags, default_reflags)

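        # Extra reflags are OR-ed into the defaults rather than replacing them.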
        lexer = ParserLex.make_lexer(reflags=re.DOTALL)
        self.assertEqual(lexer.lexreflags, re.DOTALL | default_reflags)

    def testSimpleTokens(self):
        """Test for simple tokens"""
        lexer = ParserLex.make_lexer()

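        # The packed input checks that the lexer picks the longest operator
        # match: "!=" comes back as a single NE token, and "<<=" splits into
        # LT followed by LE.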
        lexer.input("=!= <<= >>= +-*/()")
        self._assertToken(lexer.token(), "EQ", "=")
        self._assertToken(lexer.token(), "NE", "!=")
        self._assertToken(lexer.token(), "LT", "<")
        self._assertToken(lexer.token(), "LE", "<=")
        self._assertToken(lexer.token(), "GT", ">")
        self._assertToken(lexer.token(), "GE", ">=")
        self._assertToken(lexer.token(), "ADD", "+")
        self._assertToken(lexer.token(), "SUB", "-")
        self._assertToken(lexer.token(), "MUL", "*")
        self._assertToken(lexer.token(), "DIV", "/")
        self._assertToken(lexer.token(), "LPAREN", "(")
        self._assertToken(lexer.token(), "RPAREN", ")")
        self.assertIsNone(lexer.token())

    def testReservedTokens(self):
        """Test for reserved words"""
        lexer = ParserLex.make_lexer()

        tokens = "NOT IN OR AND OVERLAPS"
        lexer.input(tokens)
        for token in tokens.split():
            self._assertToken(lexer.token(), token, token)
        self.assertIsNone(lexer.token())

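        # Reserved words are case-insensitive; both the token type and its
        # value are normalized to upper case.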
        tokens = "not in or and overlaps"
        lexer.input(tokens)
        for token in tokens.split():
            self._assertToken(lexer.token(), token.upper(), token.upper())
        self.assertIsNone(lexer.token())

        # not reserved
        token = "NOTIN"
        lexer.input(token)
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", token)
        self.assertIsNone(lexer.token())

    def testStringLiteral(self):
        """Test for string literals"""
        lexer = ParserLex.make_lexer()

        lexer.input("''")
        self._assertToken(lexer.token(), "STRING_LITERAL", "")
        self.assertIsNone(lexer.token())

        lexer.input("'string'")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self.assertIsNone(lexer.token())

        lexer.input("'string' 'string'\n'string'")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self.assertIsNone(lexer.token())

        # odd newline inside string
        lexer.input("'string\nstring'")
        with self.assertRaises(ParserLexError):
            lexer.token()

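        # unterminated string literal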
        lexer.input("'string")
        with self.assertRaises(ParserLexError):
            lexer.token()

    def testNumericLiteral(self):
        """Test for numeric literals"""
        lexer = ParserLex.make_lexer()

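        # The token value keeps the literal's source text; the lexer does not
        # convert it to int or float.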
        lexer.input("0 100 999. 100.1 1e10 1e-10 1.e+20 .2E5")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "0")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "100")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "999.")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "100.1")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1e10")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1e-10")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1.e+20")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", ".2E5")
        self.assertIsNone(lexer.token())

    def testRangeLiteral(self):
        """Test for range literals"""
        lexer = ParserLex.make_lexer()

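        # A range lexes into a (start, stop, stride) tuple of ints, with
        # stride None when omitted; whitespace around ".." and ":" is allowed.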
        lexer.input("0..10 -10..-1 -10..10:2 0 .. 10 0 .. 10 : 2 ")
        self._assertToken(lexer.token(), "RANGE_LITERAL", (0, 10, None))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (-10, -1, None))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (-10, 10, 2))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (0, 10, None))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (0, 10, 2))
        self.assertIsNone(lexer.token())

    def testTimeLiteral(self):
        """Test for time literals"""
        lexer = ParserLex.make_lexer()

        # the quoted string can contain anything; the lexer does not check it
        lexer.input("T'2020-03-30' T'2020-03-30 00:00:00' T'2020-03-30T00:00:00' T'123.456' T'time'")
        self._assertToken(lexer.token(), "TIME_LITERAL", "2020-03-30")
        self._assertToken(lexer.token(), "TIME_LITERAL", "2020-03-30 00:00:00")
        self._assertToken(lexer.token(), "TIME_LITERAL", "2020-03-30T00:00:00")
        self._assertToken(lexer.token(), "TIME_LITERAL", "123.456")
        self._assertToken(lexer.token(), "TIME_LITERAL", "time")
        self.assertIsNone(lexer.token())

    def testIdentifier(self):
        """Test for identifiers"""
        lexer = ParserLex.make_lexer()

        lexer.input("ID id _012 a_b_C")
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", "ID")
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", "id")
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", "_012")
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", "a_b_C")
        self.assertIsNone(lexer.token())

        lexer.input("a.b a.b.c _._ _._._")
        self._assertToken(lexer.token(), "QUALIFIED_IDENTIFIER", "a.b")
        self._assertToken(lexer.token(), "QUALIFIED_IDENTIFIER", "a.b.c")
        self._assertToken(lexer.token(), "QUALIFIED_IDENTIFIER", "_._")
        self._assertToken(lexer.token(), "QUALIFIED_IDENTIFIER", "_._._")
        self.assertIsNone(lexer.token())

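        # a leading dot cannot start an identifier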
        lexer.input(".id")
        with self.assertRaises(ParserLexError):
            lexer.token()

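        # a trailing dot is not consumed and fails to lex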
        lexer.input("id.")
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", "id")
        with self.assertRaises(ParserLexError):
            lexer.token()

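        # at most three dot-separated components form one qualified
        # identifier; anything beyond that fails to lex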
        lexer.input("id.id.id.id")
        self._assertToken(lexer.token(), "QUALIFIED_IDENTIFIER", "id.id.id")
        with self.assertRaises(ParserLexError):
            lexer.token()

    def testExpression(self):
        """Test for a more or less complete expression"""
        lexer = ParserLex.make_lexer()

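        # Keywords appear in mixed case ("AND" vs. "or") but always lex to
        # upper-cased reserved tokens.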
        expr = (
            "((instrument='HSC' AND detector != 9) OR instrument='CFHT') "
            "AND tract=8766 AND patch.cell_x > 5 AND "
            "patch.cell_y < 4 AND band='i' "
            "or visit IN (1..50:2)"
        )
        tokens = (
            ("LPAREN", "("),
            ("LPAREN", "("),
            ("SIMPLE_IDENTIFIER", "instrument"),
            ("EQ", "="),
            ("STRING_LITERAL", "HSC"),
            ("AND", "AND"),
            ("SIMPLE_IDENTIFIER", "detector"),
            ("NE", "!="),
            ("NUMERIC_LITERAL", "9"),
            ("RPAREN", ")"),
            ("OR", "OR"),
            ("SIMPLE_IDENTIFIER", "instrument"),
            ("EQ", "="),
            ("STRING_LITERAL", "CFHT"),
            ("RPAREN", ")"),
            ("AND", "AND"),
            ("SIMPLE_IDENTIFIER", "tract"),
            ("EQ", "="),
            ("NUMERIC_LITERAL", "8766"),
            ("AND", "AND"),
            ("QUALIFIED_IDENTIFIER", "patch.cell_x"),
            ("GT", ">"),
            ("NUMERIC_LITERAL", "5"),
            ("AND", "AND"),
            ("QUALIFIED_IDENTIFIER", "patch.cell_y"),
            ("LT", "<"),
            ("NUMERIC_LITERAL", "4"),
            ("AND", "AND"),
            ("SIMPLE_IDENTIFIER", "band"),
            ("EQ", "="),
            ("STRING_LITERAL", "i"),
            ("OR", "OR"),
            ("SIMPLE_IDENTIFIER", "visit"),
            ("IN", "IN"),
            ("LPAREN", "("),
            ("RANGE_LITERAL", (1, 50, 2)),
            ("RPAREN", ")"),
        )
        lexer.input(expr)
        for type, value in tokens:
            self._assertToken(lexer.token(), type, value)
        self.assertIsNone(lexer.token())

    def testExceptions(self):
        """Test for exception contents"""

        def _assertExc(exc, expr, remain, pos, lineno):
            """Check exception attribute values"""
            self.assertEqual(exc.expression, expr)
            self.assertEqual(exc.remain, remain)
            self.assertEqual(exc.pos, pos)
            self.assertEqual(exc.lineno, lineno)

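        # `pos` is the offset of the unlexable remainder within the full
        # expression string; `lineno` is its 1-based line number.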
        lexer = ParserLex.make_lexer()
        expr = "a.b.c.d"
        lexer.input(expr)
        self._assertToken(lexer.token(), "QUALIFIED_IDENTIFIER", "a.b.c")
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ".d", 5, 1)

        lexer = ParserLex.make_lexer()
        expr = "a \n& b"
        lexer.input(expr)
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", "a")
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, "& b", 3, 2)

        lexer = ParserLex.make_lexer()
        expr = "a\n=\n1e5.e2"
        lexer.input(expr)
        self._assertToken(lexer.token(), "SIMPLE_IDENTIFIER", "a")
        self._assertToken(lexer.token(), "EQ", "=")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1e5")
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ".e2", 7, 3)

        # zero stride in range literal
        lexer = ParserLex.make_lexer()
        expr = "1..2:0"
        lexer.input(expr)
        self._assertToken(lexer.token(), "RANGE_LITERAL", (1, 2, None))
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ":0", 4, 1)

        # negative stride in range literal
        lexer = ParserLex.make_lexer()
        expr = "1..2:-10"
        lexer.input(expr)
        self._assertToken(lexer.token(), "RANGE_LITERAL", (1, 2, None))
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ":-10", 4, 1)


if __name__ == "__main__":
    unittest.main()