Coverage for tests/test_exprParserLex.py : 10%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Simple unit test for expr_parser/parserLex module.
"""
import re
import unittest

from lsst.daf.butler.registry.queries.exprParser import ParserLex, ParserLexError
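

# Every test below follows the same PLY-style protocol: feed a string to the
# lexer with input() and pull tokens one at a time with token(), which returns
# None at end of input. A minimal sketch of that loop, assuming only the
# interface exercised in this file; the helper name _tokenize is illustrative
# and the tests do not depend on it.
def _tokenize(expression):
    """Return the list of (type, value) pairs lexed from ``expression``."""
    lexer = ParserLex.make_lexer()
    lexer.input(expression)
    tokens = []
    while True:
        token = lexer.token()
        if token is None:
            break
        tokens.append((token.type, token.value))
    return tokens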


class ParserLexTestCase(unittest.TestCase):
    """A test case for ParserLex
    """

    def _assertToken(self, token, type, value, lineno=None, lexpos=None):
        self.assertIsNotNone(token)
        self.assertEqual(token.type, type)
        self.assertEqual(token.value, value)
        if lineno is not None:
            self.assertEqual(token.lineno, lineno)
        if lexpos is not None:
            self.assertEqual(token.lexpos, lexpos)

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testInstantiate(self):
        """Tests for making ParserLex instances
        """
        default_reflags = re.IGNORECASE | re.VERBOSE
        lexer = ParserLex.make_lexer()
        self.assertEqual(lexer.lexreflags, default_reflags)

        lexer = ParserLex.make_lexer(reflags=re.DOTALL)
        self.assertEqual(lexer.lexreflags, re.DOTALL | default_reflags)
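
    # Operator lexing note: as the assertions below show, the lexer prefers
    # the longest operator it can match at each position, so "<<=" lexes as
    # "<" followed by "<=", and ">>=" as ">" followed by ">=".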
    def testSimpleTokens(self):
        """Test for simple tokens"""
        lexer = ParserLex.make_lexer()

        lexer.input("=!= <<= >>= +-*/()")
        self._assertToken(lexer.token(), 'EQ', '=')
        self._assertToken(lexer.token(), 'NE', '!=')
        self._assertToken(lexer.token(), 'LT', '<')
        self._assertToken(lexer.token(), 'LE', '<=')
        self._assertToken(lexer.token(), 'GT', '>')
        self._assertToken(lexer.token(), 'GE', '>=')
        self._assertToken(lexer.token(), 'ADD', '+')
        self._assertToken(lexer.token(), 'SUB', '-')
        self._assertToken(lexer.token(), 'MUL', '*')
        self._assertToken(lexer.token(), 'DIV', '/')
        self._assertToken(lexer.token(), 'LPAREN', '(')
        self._assertToken(lexer.token(), 'RPAREN', ')')
        self.assertIsNone(lexer.token())
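
    # Reserved words are matched case-insensitively (the lexer is built with
    # re.IGNORECASE): the token value keeps the original spelling while the
    # token type is the upper-case reserved word. The commented-out lists
    # below suggest a larger reserved set; only NOT/IN/OR/AND are exercised
    # here.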
    def testReservedTokens(self):
        """Test for reserved words"""
        lexer = ParserLex.make_lexer()

        # tokens = "IS NOT IN NULL OR AND BETWEEN LIKE ESCAPE REGEXP"
        tokens = "NOT IN OR AND"
        lexer.input(tokens)
        for token in tokens.split():
            self._assertToken(lexer.token(), token, token)
        self.assertIsNone(lexer.token())

        # tokens = "is not in null or and between like escape regexp"
        tokens = "not in or and"
        lexer.input(tokens)
        for token in tokens.split():
            self._assertToken(lexer.token(), token.upper(), token)
        self.assertIsNone(lexer.token())

        # not reserved
        token = "ISNOTIN"
        lexer.input(token)
        self._assertToken(lexer.token(), "IDENTIFIER", token)
        self.assertIsNone(lexer.token())
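
    # String literals are single-quoted and may be empty, but may not span
    # lines: a newline inside the quotes, or a missing closing quote, raises
    # ParserLexError.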
    def testStringLiteral(self):
        """Test for string literals"""
        lexer = ParserLex.make_lexer()

        lexer.input("''")
        self._assertToken(lexer.token(), "STRING_LITERAL", "")
        self.assertIsNone(lexer.token())

        lexer.input("'string'")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self.assertIsNone(lexer.token())

        lexer.input("'string' 'string'\n'string'")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self._assertToken(lexer.token(), "STRING_LITERAL", "string")
        self.assertIsNone(lexer.token())

        # odd newline inside string
        lexer.input("'string\nstring'")
        with self.assertRaises(ParserLexError):
            lexer.token()

        lexer.input("'string")
        with self.assertRaises(ParserLexError):
            lexer.token()
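
    # Numeric literals keep their source spelling as the token value; the
    # cases below cover integers, trailing- and leading-dot floats, and
    # exponent forms with either case of "e" and an optional sign.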
    def testNumericLiteral(self):
        """Test for numeric literals"""
        lexer = ParserLex.make_lexer()

        lexer.input("0 100 999. 100.1 1e10 1e-10 1.e+20 .2E5")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "0")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "100")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "999.")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "100.1")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1e10")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1e-10")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1.e+20")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", ".2E5")
        self.assertIsNone(lexer.token())
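
    # Range literals have the form START..STOP[:STRIDE] with optional
    # whitespace around ".." and ":"; the token value is a tuple
    # (start, stop, stride), with stride None when omitted.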
    def testRangeLiteral(self):
        """Test for range literals"""
        lexer = ParserLex.make_lexer()

        lexer.input("0..10 -10..-1 -10..10:2 0 .. 10 0 .. 10 : 2 ")
        self._assertToken(lexer.token(), "RANGE_LITERAL", (0, 10, None))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (-10, -1, None))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (-10, 10, 2))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (0, 10, None))
        self._assertToken(lexer.token(), "RANGE_LITERAL", (0, 10, 2))
        self.assertIsNone(lexer.token())
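
    # Identifiers may contain at most one dot: "a.b" is a single IDENTIFIER
    # token, while a leading dot, a trailing dot, or a second dotted part
    # (as in "id.id.id") is a lexing error.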
    def testIdentifier(self):
        """Test for identifiers"""
        lexer = ParserLex.make_lexer()

        lexer.input("ID id _012 a_b_C")
        self._assertToken(lexer.token(), "IDENTIFIER", "ID")
        self._assertToken(lexer.token(), "IDENTIFIER", "id")
        self._assertToken(lexer.token(), "IDENTIFIER", "_012")
        self._assertToken(lexer.token(), "IDENTIFIER", "a_b_C")
        self.assertIsNone(lexer.token())

        lexer.input("a.b _._")
        self._assertToken(lexer.token(), "IDENTIFIER", "a.b")
        self._assertToken(lexer.token(), "IDENTIFIER", "_._")
        self.assertIsNone(lexer.token())

        lexer.input(".id")
        with self.assertRaises(ParserLexError):
            lexer.token()

        lexer.input("id.")
        self._assertToken(lexer.token(), "IDENTIFIER", "id")
        with self.assertRaises(ParserLexError):
            lexer.token()

        lexer.input("id.id.id")
        self._assertToken(lexer.token(), "IDENTIFIER", "id.id")
        with self.assertRaises(ParserLexError):
            lexer.token()
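
    # End-to-end check on a realistic Butler query expression. With the
    # illustrative _tokenize() helper defined above, the same comparison
    # could be written as a single assertion:
    #
    #     self.assertEqual(_tokenize(expr), list(tokens))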
    def testExpression(self):
        """Test for more or less complete expression"""
        lexer = ParserLex.make_lexer()

        expr = ("((instrument='HSC' AND detector != 9) OR instrument='CFHT') "
                "AND tract=8766 AND patch.cell_x > 5 AND "
                "patch.cell_y < 4 AND abstract_filter='i' "
                "or visit IN (1..50:2)")
        tokens = (("LPAREN", "("),
                  ("LPAREN", "("),
                  ("IDENTIFIER", "instrument"),
                  ("EQ", "="),
                  ("STRING_LITERAL", "HSC"),
                  ("AND", "AND"),
                  ("IDENTIFIER", "detector"),
                  ("NE", "!="),
                  ("NUMERIC_LITERAL", "9"),
                  ("RPAREN", ")"),
                  ("OR", "OR"),
                  ("IDENTIFIER", "instrument"),
                  ("EQ", "="),
                  ("STRING_LITERAL", "CFHT"),
                  ("RPAREN", ")"),
                  ("AND", "AND"),
                  ("IDENTIFIER", "tract"),
                  ("EQ", "="),
                  ("NUMERIC_LITERAL", "8766"),
                  ("AND", "AND"),
                  ("IDENTIFIER", "patch.cell_x"),
                  ("GT", ">"),
                  ("NUMERIC_LITERAL", "5"),
                  ("AND", "AND"),
                  ("IDENTIFIER", "patch.cell_y"),
                  ("LT", "<"),
                  ("NUMERIC_LITERAL", "4"),
                  ("AND", "AND"),
                  ("IDENTIFIER", "abstract_filter"),
                  ("EQ", "="),
                  ("STRING_LITERAL", "i"),
                  ("OR", "or"),
                  ("IDENTIFIER", "visit"),
                  ("IN", "IN"),
                  ("LPAREN", "("),
                  ("RANGE_LITERAL", (1, 50, 2)),
                  ("RPAREN", ")"),
                  )
        lexer.input(expr)
        for type, value in tokens:
            self._assertToken(lexer.token(), type, value)
        self.assertIsNone(lexer.token())
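
    # ParserLexError carries enough context to point at the failure: the full
    # input (expression), the unconsumed tail (remain), the zero-based offset
    # of that tail within the input (pos), and a one-based line number
    # (lineno).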
    def testExceptions(self):
        """Test for exception contents"""

        def _assertExc(exc, expr, remain, pos, lineno):
            """Check exception attribute values"""
            self.assertEqual(exc.expression, expr)
            self.assertEqual(exc.remain, remain)
            self.assertEqual(exc.pos, pos)
            self.assertEqual(exc.lineno, lineno)

        lexer = ParserLex.make_lexer()
        expr = "a.b.c"
        lexer.input(expr)
        self._assertToken(lexer.token(), "IDENTIFIER", "a.b")
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ".c", 3, 1)

        lexer = ParserLex.make_lexer()
        expr = "a \n& b"
        lexer.input(expr)
        self._assertToken(lexer.token(), "IDENTIFIER", "a")
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, "& b", 3, 2)

        lexer = ParserLex.make_lexer()
        expr = "a\n=\n1e5.e2"
        lexer.input(expr)
        self._assertToken(lexer.token(), "IDENTIFIER", "a")
        self._assertToken(lexer.token(), "EQ", "=")
        self._assertToken(lexer.token(), "NUMERIC_LITERAL", "1e5")
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ".e2", 7, 3)

        # zero stride in range literal
        lexer = ParserLex.make_lexer()
        expr = "1..2:0"
        lexer.input(expr)
        self._assertToken(lexer.token(), "RANGE_LITERAL", (1, 2, None))
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ":0", 4, 1)

        # negative stride in range literal
        lexer = ParserLex.make_lexer()
        expr = "1..2:-10"
        lexer.input(expr)
        self._assertToken(lexer.token(), "RANGE_LITERAL", (1, 2, None))
        with self.assertRaises(ParserLexError) as catcher:
            lexer.token()
        _assertExc(catcher.exception, expr, ":-10", 4, 1)


if __name__ == "__main__":
    unittest.main()