Commit c19155e6 authored by Taddeus Kroes

Generalized PI to SPECIAL_TOKENS, now including INFINITY.

parent 6b59d5de

@@ -109,3 +109,5 @@ Division of 0 by 1 reduces to 0.
 - Create unit tests for node inequivalence operator.
+- Line printer: 1 / (n + n)x -> 1 / (n + n) * x
+- Parser: 'apia' -> 'aa'

@@ -52,6 +52,9 @@ OP_REWRITE = 21
 # Special identifiers
 PI = 'pi'
 E = 'e'
+INFINITY = 'oo'
+
+SPECIAL_TOKENS = [PI, INFINITY]

 # Default base to use in parsing 'log(...)'
 DEFAULT_LOGARITHM_BASE = 10
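
With the identifiers grouped, call sites no longer have to name PI explicitly: anything that wants "all special identifiers" can splice in SPECIAL_TOKENS. A minimal standalone sketch of the idea ('sin' and 'cos' stand in for the FUNCTION words the real parser derives from OP_MAP; module path assumed to be src/node.py):

    # Constants as defined in the hunk above.
    PI = 'pi'
    E = 'e'
    INFINITY = 'oo'

    SPECIAL_TOKENS = [PI, INFINITY]

    # A consumer such as the parser's ignore-word list can splice in the whole
    # group instead of hard-coding 'pi' (compare the words tuple further down).
    words = ('sin', 'cos') + ('raise', 'graph') + tuple(SPECIAL_TOKENS)
    assert words == ('sin', 'cos', 'raise', 'graph', 'pi', 'oo')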

@@ -430,15 +433,6 @@ class ExpressionLeaf(Leaf, ExpressionBase):
         return self.negated == other.negated and self.type == other.type \
                 and self.value == other.value

-    def __str__(self):
-        val = str(self.value)
-
-        # Replace PI leaf by the Greek character
-        if val == PI:
-            val = u_PI
-
-        return '-' * self.negated + val
-
     def __repr__(self):
         return str(self)


@@ -16,7 +16,8 @@ from graph_drawing.graph import generate_graph

 from node import ExpressionNode as Node, ExpressionLeaf as Leaf, OP_MAP, \
         OP_DER, TOKEN_MAP, TYPE_OPERATOR, OP_COMMA, OP_NEG, OP_MUL, OP_DIV, \
-        OP_LOG, OP_ADD, Scope, PI, E, DEFAULT_LOGARITHM_BASE, OP_VALUE_MAP
+        OP_LOG, OP_ADD, Scope, E, DEFAULT_LOGARITHM_BASE, OP_VALUE_MAP, \
+        SPECIAL_TOKENS
 from rules import RULES
 from strategy import pick_suggestion
 from possibilities import filter_duplicates, apply_suggestion

@@ -48,7 +49,8 @@ class Parser(BisonParser):

     # Words to be ignored by preprocessor
     words = zip(*filter(lambda (s, op): TOKEN_MAP[op] == 'FUNCTION', \
-                OP_MAP.iteritems()))[0] + ('raise', 'graph', PI)
+                OP_MAP.iteritems()))[0] \
+                + ('raise', 'graph') + tuple(SPECIAL_TOKENS)

     # Output directory of generated pybison files, including a trailing slash.
     buildDirectory = PYBISON_BUILD + '/'

@@ -143,10 +145,11 @@
         self.possibilities = []

         # Replace known keywords with escape sequences.
-        words = list(Parser.words)
+        words = list(self.__class__.words)
         words.insert(10, '\n')

         for i, keyword in enumerate(words):
+            # FIXME: Why case-insensitivity?
             data = re.sub(keyword, chr(i), data, flags=re.I)

         # TODO: remove this quick preprocessing hack. This hack enables
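
The loop above rewrites every known keyword to a one-byte escape character before the implicit-multiplication rules run, so a word such as 'pi' or 'oo' cannot be split into 'p * i' or 'o * o'. A simplified, self-contained sketch of that step (the real word list is Parser.words, built from OP_MAP plus 'raise', 'graph' and SPECIAL_TOKENS; the subset here is illustrative):

    import re

    words = ['sin', 'cos', 'pi', 'oo']

    def escape_keywords(data):
        # Collapse each keyword to a single control character so later regexes
        # cannot break it apart; chr(i) doubles as an index back into words.
        for i, keyword in enumerate(words):
            data = re.sub(keyword, chr(i), data, flags=re.I)
        return data

    print(repr(escape_keywords('2pi + sin(x)')))   # '2\x02 + \x00(x)'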

@@ -164,7 +167,7 @@
             + '|([\x00-\x09\x0b-\x19a-z0-9])\s*(\()' # a( -> a * (
             + '|(\))\s*([\x00-\x09\x0b-\x19a-z0-9])' # )a -> ) * a
             + '|([\x00-\x09\x0b-\x19a-z])\s*'
-              '([\x00-\x09\x0b-\x19a-z]+)' # ab -> a * b
+              '([\x00-\x09\x0b-\x19a-z])' # ab -> a * b
             + '|([0-9])\s*([\x00-\x09\x0b-\x19a-z])' # 4a -> 4 * a
             + '|([\x00-\x09\x0b-\x19a-z])\s*([0-9])' # a4 -> a ^ 4
             + '|([0-9])\s+([0-9]))' # 4 4 -> 4 * 4
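
The only change in this hunk is dropping the + quantifier in the 'ab -> a * b' alternative, so each match consumes a single following letter instead of a whole run. A rough standalone illustration of the resulting pairwise splitting, assuming the pattern is applied until it stops matching (the real preprocessor folds this into the larger alternation above):

    import re

    # Single letter followed by a single letter, as in the updated alternative.
    pattern = r'([a-z])\s*([a-z])'

    def insert_mul(data):
        # Substitute until a fixed point: 'abc' -> 'a * bc' -> 'a * b * c'.
        while True:
            new = re.sub(pattern, r'\1 * \2', data)
            if new == data:
                return data
            data = new

    print(insert_mul('abc'))   # a * b * c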

@@ -478,12 +481,15 @@
                 % (option, target))  # pragma: nocover

         # -----------------------------------------
-        # Special characters and operator tokens
+        # Special tokens and operator tokens
         # -----------------------------------------
-        operators = '"%s"%s{ returntoken(IDENTIFIER); }\n' \
-                    % (PI, ' ' * (8 - len(PI)))
+        operators = ''
         functions = []

+        for token in SPECIAL_TOKENS:
+            operators += '"%s"%s{ returntoken(IDENTIFIER); }\n' \
+                         % (token, ' ' * (8 - len(token)))
+
         for op_str, op in OP_MAP.iteritems():
             if TOKEN_MAP[op] == 'FUNCTION':
                 functions.append(op_str)
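
The new loop emits one scanner rule per entry in SPECIAL_TOKENS instead of a single hard-coded rule for PI. A small sketch of the fragment it generates (standalone; returntoken and IDENTIFIER belong to the pybison-generated scanner, not to this snippet):

    SPECIAL_TOKENS = ['pi', 'oo']

    operators = ''
    for token in SPECIAL_TOKENS:
        # Right-pad the quoted token so the scanner actions line up in one column.
        operators += '"%s"%s{ returntoken(IDENTIFIER); }\n' \
                     % (token, ' ' * (8 - len(token)))

    print(operators)
    # "pi"      { returntoken(IDENTIFIER); }
    # "oo"      { returntoken(IDENTIFIER); }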

@@ -2,7 +2,8 @@
 import unittest

 from src.parser import Parser
-from src.node import ExpressionNode as Node, ExpressionLeaf as Leaf
+from src.node import ExpressionNode as Node, ExpressionLeaf as Leaf, \
+        SPECIAL_TOKENS
 from tests.parser import ParserWrapper, run_expressions, line, graph
 from tests.rulestestcase import tree
 from src.rules.goniometry import sin, cos

@@ -89,3 +90,10 @@ class TestParser(unittest.TestCase):
         self.assertEqual(tree('log_10(x)'), log(x))
         self.assertEqual(tree('log_g(x)'), log(x, g))
         self.assertEqual(tree('log_g x'), log(x, g))
+
+    def test_special_tokens(self):
+        for token in SPECIAL_TOKENS:
+            self.assertEqual(tree(token), Leaf(token))
+            a, t = Leaf('a'), Leaf(token)
+            self.assertEqual(tree('a' + token), a * t)
+            # FIXME: self.assertEqual(tree('a' + token + 'a'), a * t * a)
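
The commented-out assertion is the same case as the 'apia' -> 'aa' entry in the TODO hunk at the top: with token 'pi', 'a' + token + 'a' is 'apia', which currently does not parse to a * pi * a. One way to keep that case visible without breaking the suite is an expected-failure test; a sketch reusing this module's imports (class and method names are illustrative):

    import unittest

    from src.node import ExpressionLeaf as Leaf
    from tests.rulestestcase import tree

    class TestSpecialTokenRegression(unittest.TestCase):
        @unittest.expectedFailure
        def test_token_between_identifiers(self):
            # 'apia' should eventually parse as a * pi * a; per the TODO it
            # currently comes out as 'aa', hence the FIXME above.
            a, t = Leaf('a'), Leaf('pi')
            self.assertEqual(tree('apia'), a * t * a)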