Commit 54067813 authored by Taddeus Kroes's avatar Taddeus Kroes

Moved token definitions to operator defs in node.py.

parent 04779034
...@@ -29,14 +29,21 @@ OP_MOD = 7 ...@@ -29,14 +29,21 @@ OP_MOD = 7
# N-ary (functions) # N-ary (functions)
OP_INT = 8 OP_INT = 8
OP_EXPAND = 9 OP_COMMA = 9
OP_COMMA = 10 OP_SQRT = 10
OP_SQRT = 11
# Goniometry # Goniometry
OP_SIN = 12 OP_SIN = 11
OP_COS = 13 OP_COS = 12
OP_TAN = 14 OP_TAN = 13
OP_SOLVE = 14
OP_EQ = 15
OP_POSSIBILITIES = 16
OP_HINT = 17
OP_REWRITE_ALL = 18
OP_REWRITE = 19
TYPE_MAP = { TYPE_MAP = {
...@@ -46,19 +53,45 @@ TYPE_MAP = { ...@@ -46,19 +53,45 @@ TYPE_MAP = {
} }
OP_MAP = { OP_MAP = {
',': OP_COMMA,
'+': OP_ADD, '+': OP_ADD,
# Either subtraction or negation. Skip the operator sign in 'x' (= 2).
'-': OP_SUB, '-': OP_SUB,
'*': OP_MUL, '*': OP_MUL,
'/': OP_DIV, '/': OP_DIV,
'^': OP_POW, '^': OP_POW,
'mod': OP_MOD, 'sin': OP_SIN,
'int': OP_INT, 'cos': OP_COS,
'expand': OP_EXPAND, 'tan': OP_TAN,
'sqrt': OP_SQRT, 'sqrt': OP_SQRT,
',': OP_COMMA, 'int': OP_INT,
'solve': OP_SOLVE,
'=': OP_EQ,
'??': OP_POSSIBILITIES,
'?': OP_HINT,
'@@': OP_REWRITE_ALL,
'@': OP_REWRITE,
} }
TOKEN_MAP = {
OP_COMMA: 'COMMA',
OP_ADD: 'PLUS',
OP_SUB: 'MINUS',
OP_MUL: 'TIMES',
OP_DIV: 'DIVIDE',
OP_POW: 'POW',
OP_SQRT: 'SQRT',
OP_SIN: 'SIN',
OP_COS: 'COS',
OP_TAN: 'TAN',
OP_INT: 'INT',
OP_SOLVE: 'SOLVE',
OP_EQ: 'EQ',
OP_POSSIBILITIES: 'POSSIBILITIES',
OP_HINT: 'HINT',
OP_REWRITE_ALL: 'REWRITE_ALL',
OP_REWRITE: 'REWRITE',
}
def to_expression(obj): def to_expression(obj):
return obj if isinstance(obj, ExpressionBase) else ExpressionLeaf(obj) return obj if isinstance(obj, ExpressionBase) else ExpressionLeaf(obj)
......
...@@ -3,8 +3,6 @@ This parser will parse the given input and build an expression tree. Grammar ...@@ -3,8 +3,6 @@ This parser will parse the given input and build an expression tree. Grammar
file for the supported mathematical expressions. file for the supported mathematical expressions.
""" """
from node import ExpressionNode as Node, ExpressionLeaf as Leaf
import os.path import os.path
PYBISON_BUILD = os.path.realpath('build/external/pybison') PYBISON_BUILD = os.path.realpath('build/external/pybison')
EXTERNAL_MODS = os.path.realpath('external') EXTERNAL_MODS = os.path.realpath('external')
...@@ -16,7 +14,8 @@ sys.path.insert(1, EXTERNAL_MODS) ...@@ -16,7 +14,8 @@ sys.path.insert(1, EXTERNAL_MODS)
from pybison import BisonParser, BisonSyntaxError from pybison import BisonParser, BisonSyntaxError
from graph_drawing.graph import generate_graph from graph_drawing.graph import generate_graph
from node import TYPE_OPERATOR, OP_COMMA, OP_NEG, OP_MUL, Scope from node import ExpressionNode as Node, ExpressionLeaf as Leaf, OP_MAP, \
TOKEN_MAP, TYPE_OPERATOR, OP_COMMA, OP_NEG, OP_MUL, Scope
from rules import RULES from rules import RULES
from possibilities import filter_duplicates, pick_suggestion, apply_suggestion from possibilities import filter_duplicates, pick_suggestion, apply_suggestion
...@@ -52,10 +51,8 @@ class Parser(BisonParser): ...@@ -52,10 +51,8 @@ class Parser(BisonParser):
# ---------------------------------------------------------------- # ----------------------------------------------------------------
# TODO: add a runtime check to verify that this token list match the list # TODO: add a runtime check to verify that this token list match the list
# of tokens of the lex script. # of tokens of the lex script.
tokens = ['NUMBER', 'IDENTIFIER', 'POSSIBILITIES', tokens = ['NUMBER', 'IDENTIFIER', 'NEWLINE', 'QUIT', 'RAISE', 'GRAPH', \
'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'POW', 'LPAREN', 'RPAREN'] + TOKEN_MAP.values()
'LPAREN', 'RPAREN', 'COMMA', 'HINT', 'REWRITE',
'NEWLINE', 'QUIT', 'RAISE', 'GRAPH', 'SQRT']
# ------------------------------ # ------------------------------
# precedences # precedences
...@@ -393,6 +390,15 @@ class Parser(BisonParser): ...@@ -393,6 +390,15 @@ class Parser(BisonParser):
raise BisonSyntaxError('Unsupported option %d in target "%s".' raise BisonSyntaxError('Unsupported option %d in target "%s".'
% (option, target)) # pragma: nocover % (option, target)) # pragma: nocover
# -----------------------------------------
# operator tokens
# -----------------------------------------
operators = ''
for op_str, op in OP_MAP.iteritems():
operators += '"%s"%s{ returntoken(%s); }\n' \
% (op_str, ' ' * (8 - len(op_str)), TOKEN_MAP[op])
# ----------------------------------------- # -----------------------------------------
# raw lex script, verbatim here # raw lex script, verbatim here
# ----------------------------------------- # -----------------------------------------
...@@ -430,19 +436,11 @@ class Parser(BisonParser): ...@@ -430,19 +436,11 @@ class Parser(BisonParser):
[a-zA-Z] { returntoken(IDENTIFIER); } [a-zA-Z] { returntoken(IDENTIFIER); }
"(" { returntoken(LPAREN); } "(" { returntoken(LPAREN); }
")" { returntoken(RPAREN); } ")" { returntoken(RPAREN); }
"+" { returntoken(PLUS); }
"-" { returntoken(MINUS); }
"*" { returntoken(TIMES); }
"^" { returntoken(POW); }
"/" { returntoken(DIVIDE); }
"," { returntoken(COMMA); } "," { returntoken(COMMA); }
"??" { returntoken(POSSIBILITIES); } """ + operators + r"""
"?" { returntoken(HINT); }
"@" { returntoken(REWRITE); }
"quit" { yyterminate(); returntoken(QUIT); }
"raise" { returntoken(RAISE); } "raise" { returntoken(RAISE); }
"graph" { returntoken(GRAPH); } "graph" { returntoken(GRAPH); }
"sqrt" { returntoken(SQRT); } "quit" { yyterminate(); returntoken(QUIT); }
[ \t\v\f] { } [ \t\v\f] { }
[\n] { yycolumn = 0; returntoken(NEWLINE); } [\n] { yycolumn = 0; returntoken(NEWLINE); }
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment