Coverage for python/lsst/daf/butler/registry/queries/exprParser/parserYacc.py : 21%

# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

# type: ignore

"""Syntax definition for user expression parser.
"""

__all__ = ["ParserYacc", "ParserYaccError", "ParseError", "ParserEOFError"]

# -------------------------------
# Imports of standard modules --
# -------------------------------
import re

# -----------------------------
# Imports for other modules --
# -----------------------------
import astropy.time

from .exprTree import (BinaryOp, Identifier, IsIn, NumericLiteral, Parens,
                       RangeLiteral, StringLiteral, TimeLiteral, UnaryOp)
from .ply import yacc
from .parserLex import ParserLex

# ----------------------------------
# Local non-exported definitions --
# ----------------------------------

# The purpose of this regex is to guess the time format if it is not
# explicitly provided in the string itself.
_re_time_str = re.compile(r"""
    ((?P<format>\w+)/)?                                       # optionally prefixed by "format/"
    (?P<value>
        (?P<number>-?(\d+(\.\d*)|(\.\d+)))                    # floating point number
        |
        (?P<iso>\d+-\d+-\d+([ T]\d+:\d+(:\d+([.]\d*)?)?)?)    # iso(t)
        |
        (?P<fits>[+]\d+-\d+-\d+(T\d+:\d+:\d+([.]\d*)?)?)      # fits
        |
        (?P<yday>\d+:\d+(:\d+:\d+(:\d+([.]\d*)?)?)?)          # yday
    )
    (/(?P<scale>\w+))?                                        # optionally followed by "/scale"
    $
""", re.VERBOSE | re.IGNORECASE)


def _parseTimeString(time_str):
    """Try to convert a time string into `astropy.time.Time`.

    Parameters
    ----------
    time_str : `str`
        Input string.

    Returns
    -------
    time : `astropy.time.Time`

    Raises
    ------
    ValueError
        Raised if the input string has an unexpected format.
    """
    match = _re_time_str.match(time_str)
    if not match:
        raise ValueError(f"Time string \"{time_str}\" does not match known formats")

    value, fmt, scale = match.group("value", "format", "scale")
    if fmt is not None:
        fmt = fmt.lower()
        if fmt not in astropy.time.Time.FORMATS:
            raise ValueError(f"Time string \"{time_str}\" specifies unknown time format \"{fmt}\"")
    if scale is not None:
        scale = scale.lower()
        if scale not in astropy.time.Time.SCALES:
            raise ValueError(f"Time string \"{time_str}\" specifies unknown time scale \"{scale}\"")

    # convert number string to floating point
    if match.group("number") is not None:
        value = float(value)

    # guess format if not given
    if fmt is None:
        if match.group("number") is not None:
            fmt = "mjd"
        elif match.group("iso") is not None:
            if "T" in value or "t" in value:
                fmt = "isot"
            else:
                fmt = "iso"
        elif match.group("fits") is not None:
            fmt = "fits"
        elif match.group("yday") is not None:
            fmt = "yday"
        assert fmt is not None

    # guess scale if not given
    if scale is None:
        if fmt in ("iso", "isot", "fits", "yday", "unix"):
            scale = "utc"
        elif fmt == "cxcsec":
            scale = "tt"
        else:
            scale = "tai"

    try:
        value = astropy.time.Time(value, format=fmt, scale=scale)
    except ValueError:
        # astropy makes a very verbose exception that is not super-useful
        # in this context; just say we don't like the string.
        raise ValueError(f"Time string \"{time_str}\" does not match format \"{fmt}\"") from None

    return value
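
# Illustrative sketch (not part of the original module): how the regex and
# the format/scale guessing above combine.  These example strings are my own
# and assume the lexer hands the literal text through unchanged:
#
#     _parseTimeString("51544.0")              # bare number -> format "mjd", scale "tai"
#     _parseTimeString("mjd/51544.0/tai")      # explicit "format/value/scale"
#     _parseTimeString("2020-01-01T12:00:00")  # ISO with "T" -> format "isot", scale "utc"
#     _parseTimeString("2020-01-01 12:00:00")  # ISO without "T" -> format "iso", scale "utc"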

# ------------------------
# Exported definitions --
# ------------------------


class ParserYaccError(Exception):
    """Base class for exceptions generated by the parser.
    """
    pass


class ParseError(ParserYaccError):
    """Exception raised for parsing errors.

    Attributes
    ----------
    expression : `str`
        Full initial expression being parsed.
    token : `str`
        Current token at parsing position.
    pos : `int`
        Current parsing position, offset from beginning of expression in
        characters.
    lineno : `int`
        Current line number in the expression.
    posInLine : `int`
        Parsing position in current line, 0-based.
    """

    def __init__(self, expression, token, pos, lineno):
        self.expression = expression
        self.token = token
        self.pos = pos
        self.lineno = lineno
        self.posInLine = self._posInLine()
        msg = "Syntax error at or near '{0}' (line: {1}, pos: {2})"
        msg = msg.format(token, lineno, self.posInLine + 1)
        ParserYaccError.__init__(self, msg)

    def _posInLine(self):
        """Return position in current line."""
        lines = self.expression.split('\n')
        pos = self.pos
        for line in lines[:self.lineno - 1]:
            # +1 for newline
            pos -= len(line) + 1
        return pos
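
    # Worked example (added for illustration, not in the original source):
    # for expression "visit\n> bad" with pos=8 and lineno=2, the loop
    # subtracts len("visit") + 1 = 6 for the first line, giving posInLine = 2,
    # the 0-based offset of "bad" within the second line.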


class ParserEOFError(ParserYaccError):
    """Exception raised when end of input is reached while the parser
    expects more input.
    """

    def __init__(self):
        Exception.__init__(self,
                           "End of input reached while expecting further input")


class ParserYacc:
    """Class which defines the PLY grammar.
    """

    def __init__(self, **kwargs):

        kw = dict(write_tables=0, debug=False)
        kw.update(kwargs)

        self.parser = yacc.yacc(module=self, **kw)

    def parse(self, input, lexer=None, debug=False, tracking=False):
        """Parse input expression and return the parsed tree object.

        This is a trivial wrapper for the yacc.LRParser.parse method which
        provides a lexer if one is not given in the arguments.

        Parameters
        ----------
        input : `str`
            Expression to parse.
        lexer : `object`, optional
            Lexer instance; if not given then ParserLex.make_lexer() is
            called to create one.
        debug : `bool`, optional
            Set to True for debugging output.
        tracking : `bool`, optional
            Set to True for tracking line numbers in the parser.
        """
        # make lexer
        if lexer is None:
            lexer = ParserLex.make_lexer()
        tree = self.parser.parse(input=input, lexer=lexer, debug=debug,
                                 tracking=tracking)
        return tree
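
    # Usage sketch (illustrative only; the expression below is my own example
    # written against the grammar rules defined further down):
    #
    #     parser = ParserYacc()
    #     tree = parser.parse("visit > 100 AND instrument = 'HSC'")
    #
    # ``tree`` is an exprTree node (here a BinaryOp with op="AND") or None
    # when the input expression is empty.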

    tokens = ParserLex.tokens[:]

    precedence = (
        ('left', 'OR'),
        ('left', 'AND'),
        ('nonassoc', 'EQ', 'NE'),  # Nonassociative operators
        ('nonassoc', 'LT', 'LE', 'GT', 'GE'),  # Nonassociative operators
        ('left', 'ADD', 'SUB'),
        ('left', 'MUL', 'DIV', 'MOD'),
        ('right', 'UPLUS', 'UMINUS', 'NOT'),  # unary plus, minus, and NOT
    )
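
    # Note added for illustration: precedence entries are listed from lowest
    # to highest, so e.g. "1 + 2 * 3" groups as 1 + (2 * 3) and produces
    # roughly BinaryOp(lhs=NumericLiteral('1'), op='+',
    # rhs=BinaryOp(lhs=NumericLiteral('2'), op='*', rhs=NumericLiteral('3'))),
    # assuming the lexer returns '+' and '*' as the ADD and MUL token values.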

    # this is the starting rule
    def p_input(self, p):
        """ input : expr
                  | empty
        """
        p[0] = p[1]

    def p_empty(self, p):
        """ empty :
        """
        p[0] = None

    def p_expr(self, p):
        """ expr : expr OR expr
                 | expr AND expr
                 | NOT expr
                 | bool_primary
        """
        if len(p) == 4:
            p[0] = BinaryOp(lhs=p[1], op=p[2].upper(), rhs=p[3])
        elif len(p) == 3:
            p[0] = UnaryOp(op=p[1].upper(), operand=p[2])
        else:
            p[0] = p[1]

    def p_bool_primary(self, p):
        """ bool_primary : bool_primary EQ predicate
                         | bool_primary NE predicate
                         | bool_primary LT predicate
                         | bool_primary LE predicate
                         | bool_primary GE predicate
                         | bool_primary GT predicate
                         | predicate
        """
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = BinaryOp(lhs=p[1], op=p[2], rhs=p[3])

    def p_predicate(self, p):
        """ predicate : bit_expr IN LPAREN literal_list RPAREN
                      | bit_expr NOT IN LPAREN literal_list RPAREN
                      | bit_expr
        """
        if len(p) == 6:
            p[0] = IsIn(lhs=p[1], values=p[4])
        elif len(p) == 7:
            p[0] = IsIn(lhs=p[1], values=p[5], not_in=True)
        else:
            p[0] = p[1]
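
    # Example added for illustration: with the rule above, an expression like
    # "visit IN (100, 200)" (my own sample input) would yield roughly
    # IsIn(lhs=Identifier('visit'), values=[NumericLiteral('100'),
    # NumericLiteral('200')]), and the NOT IN variant sets not_in=True.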

    def p_literal_list(self, p):
        """ literal_list : literal_list COMMA literal
                         | literal
        """
        if len(p) == 2:
            p[0] = [p[1]]
        else:
            p[0] = p[1] + [p[3]]

    def p_bit_expr(self, p):
        """ bit_expr : bit_expr ADD bit_expr
                     | bit_expr SUB bit_expr
                     | bit_expr MUL bit_expr
                     | bit_expr DIV bit_expr
                     | bit_expr MOD bit_expr
                     | simple_expr
        """
        if len(p) == 2:
            p[0] = p[1]
        else:
            p[0] = BinaryOp(lhs=p[1], op=p[2], rhs=p[3])

    def p_simple_expr_lit(self, p):
        """ simple_expr : literal
        """
        p[0] = p[1]

    def p_simple_expr_id(self, p):
        """ simple_expr : IDENTIFIER
        """
        p[0] = Identifier(p[1])

    def p_simple_expr_unary(self, p):
        """ simple_expr : ADD simple_expr %prec UPLUS
                        | SUB simple_expr %prec UMINUS
        """
        p[0] = UnaryOp(op=p[1], operand=p[2])

    def p_simple_expr_paren(self, p):
        """ simple_expr : LPAREN expr RPAREN
        """
        p[0] = Parens(p[2])

    def p_literal_num(self, p):
        """ literal : NUMERIC_LITERAL
        """
        p[0] = NumericLiteral(p[1])

    def p_literal_num_signed(self, p):
        """ literal : ADD NUMERIC_LITERAL %prec UPLUS
                    | SUB NUMERIC_LITERAL %prec UMINUS
        """
        p[0] = NumericLiteral(p[1] + p[2])

    def p_literal_str(self, p):
        """ literal : STRING_LITERAL
        """
        p[0] = StringLiteral(p[1])

    def p_literal_time(self, p):
        """ literal : TIME_LITERAL
        """
        try:
            value = _parseTimeString(p[1])
        except ValueError:
            raise ParseError(p.lexer.lexdata, p[1], p.lexpos(1), p.lineno(1))
        p[0] = TimeLiteral(value)

    def p_literal_range(self, p):
        """ literal : RANGE_LITERAL
        """
        # RANGE_LITERAL value is a tuple of three numbers
        start, stop, stride = p[1]
        p[0] = RangeLiteral(start, stop, stride)

    # ---------- end of all grammar rules ----------

    # Error rule for syntax errors
    def p_error(self, p):
        if p is None:
            raise ParserEOFError()
        else:
            raise ParseError(p.lexer.lexdata, p.value, p.lexpos, p.lineno)