Taddeüs Kroes / trs

Commit c19155e6, authored Mar 21, 2012 by Taddeus Kroes
Generalized PI to SPECIAL_TOKENS, now including INFINITY.

Parent: 6b59d5de
Showing 4 changed files with 27 additions and 17 deletions:
  TODO                  +2   -0
  src/node.py           +3   -9
  src/parser.py         +13  -7
  tests/test_parser.py  +9   -1
TODO

@@ -109,3 +109,5 @@ Division of 0 by 1 reduces to 0.
 - Create unit tests for node inequivalece operator.
+- Line printer: 1 / (n + n)x -> 1 / (n + n) * x
+- Parser: 'apia' -> 'aa'
src/node.py

@@ -52,6 +52,9 @@ OP_REWRITE = 21
 # Special identifiers
 PI = 'pi'
 E = 'e'
+INFINITY = 'oo'
+
+SPECIAL_TOKENS = [PI, INFINITY]
 
 # Default base to use in parsing 'log(...)'
 DEFAULT_LOGARITHM_BASE = 10
@@ -430,15 +433,6 @@ class ExpressionLeaf(Leaf, ExpressionBase):
         return self.negated == other.negated and self.type == other.type \
                and self.value == other.value
 
-    def __str__(self):
-        val = str(self.value)
-
-        # Replace PI leaf by the Greek character
-        if val == PI:
-            val = u_PI
-
-        return '-' * self.negated + val
-
     def __repr__(self):
         return str(self)
 
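The deleted __str__ was the last PI-only special case in this class: printing a leaf swapped the value 'pi' for the Greek letter. Below is a minimal standalone sketch of that removed behaviour, assuming u_PI is the Unicode pi constant defined elsewhere in node.py; leaf_str is a hypothetical free-function stand-in for the method, not repository code.

    # -*- coding: utf-8 -*-
    # Standalone sketch, not repository code: what the deleted __str__ did.
    PI = 'pi'
    u_PI = u'\u03c0'  # assumed: the Greek letter constant used in node.py

    def leaf_str(value, negated=0):
        val = str(value)
        # Replace a PI leaf by the Greek character (the removed special case)
        if val == PI:
            val = u_PI
        return '-' * negated + val

    print(leaf_str('pi'))           # prints the Greek letter pi
    print(leaf_str(2, negated=1))   # -2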
src/parser.py

@@ -16,7 +16,8 @@ from graph_drawing.graph import generate_graph
 from node import ExpressionNode as Node, ExpressionLeaf as Leaf, OP_MAP, \
         OP_DER, TOKEN_MAP, TYPE_OPERATOR, OP_COMMA, OP_NEG, OP_MUL, OP_DIV, \
-        OP_LOG, OP_ADD, Scope, PI, E, DEFAULT_LOGARITHM_BASE, OP_VALUE_MAP
+        OP_LOG, OP_ADD, Scope, E, DEFAULT_LOGARITHM_BASE, OP_VALUE_MAP, \
+        SPECIAL_TOKENS
 from rules import RULES
 from strategy import pick_suggestion
 from possibilities import filter_duplicates, apply_suggestion
@@ -48,7 +49,8 @@ class Parser(BisonParser):
     # Words to be ignored by preprocessor
     words = zip(*filter(lambda (s, op): TOKEN_MAP[op] == 'FUNCTION', \
-                OP_MAP.iteritems()))[0] + ('raise', 'graph', PI)
+                OP_MAP.iteritems()))[0] \
+            + ('raise', 'graph') + tuple(SPECIAL_TOKENS)
 
     # Output directory of generated pybison files, including a trailing slash.
     buildDirectory = PYBISON_BUILD + '/'
@@ -143,10 +145,11 @@ class Parser(BisonParser):
         self.possibilities = []
 
         # Replace known keywords with escape sequences.
-        words = list(Parser.words)
+        words = list(self.__class__.words)
+        words.insert(10, '\n')
 
         for i, keyword in enumerate(words):
             # FIXME: Why case-insensitivity?
             data = re.sub(keyword, chr(i), data, flags=re.I)
 
         # TODO: remove this quick preprocessing hack. This hack enables
@@ -164,7 +167,7 @@ class Parser(BisonParser):
             + '|([\x00-\x09\x0b-\x19a-z0-9])\s*(\()'    # a( -> a * (
             + '|(\))\s*([\x00-\x09\x0b-\x19a-z0-9])'    # )a -> ) * a
             + '|([\x00-\x09\x0b-\x19a-z])\s*'
-            + '([\x00-\x09\x0b-\x19a-z]+)'              # ab -> a * b
+            + '([\x00-\x09\x0b-\x19a-z])'               # ab -> a * b
             + '|([0-9])\s*([\x00-\x09\x0b-\x19a-z])'    # 4a -> 4 * a
             + '|([\x00-\x09\x0b-\x19a-z])\s*([0-9])'    # a4 -> a ^ 4
             + '|([0-9])\s+([0-9]))'                     # 4 4 -> 4 * 4
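The one change in this hunk narrows the second capture group from a run of letters ([...a-z]+) to a single letter ([...a-z]), so each match pairs exactly two adjacent symbols. Below is a standalone sketch of that pairing behaviour, reduced to plain a-z; it is not the repository's preprocessor, which also matches the \x00-\x19 escape bytes that keywords were replaced with and applies the pattern inside a larger alternation.

    import re

    # Standalone sketch: insert implicit multiplication between adjacent
    # letters, one pair per match.
    def insert_mul(data):
        pattern = r'([a-z])\s*([a-z])'
        # re.sub consumes both captured letters, so 'abc' needs a second
        # pass to reach 'a * b * c'; iterate until a fixed point.
        while True:
            expanded = re.sub(pattern, r'\1 * \2', data)
            if expanded == data:
                return expanded
            data = expanded

    print(insert_mul('ab'))   # a * b
    print(insert_mul('abc'))  # a * b * c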
@@ -478,12 +481,15 @@ class Parser(BisonParser):
                              % (option, target))  # pragma: nocover
 
         # -----------------------------------------
-        # Special characters and operator tokens
+        # Special tokens and operator tokens
         # -----------------------------------------
-        operators = '"%s"%s{ returntoken(IDENTIFIER); }\n' \
-                    % (PI, ' ' * (8 - len(PI)))
+        operators = ''
         functions = []
 
+        for token in SPECIAL_TOKENS:
+            operators += '"%s"%s{ returntoken(IDENTIFIER); }\n' \
+                         % (token, ' ' * (8 - len(token)))
+
         for op_str, op in OP_MAP.iteritems():
             if TOKEN_MAP[op] == 'FUNCTION':
                 functions.append(op_str)
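With the current SPECIAL_TOKENS, this loop emits one flex-style lexer rule per token, padded to an eight-column field, instead of the single hard-coded PI rule it replaces. A standalone sketch of the generated output, assuming SPECIAL_TOKENS = ['pi', 'oo'] as defined in src/node.py:

    # Standalone sketch of what the loop in the last hunk generates.
    SPECIAL_TOKENS = ['pi', 'oo']  # PI and INFINITY from src/node.py

    operators = ''
    for token in SPECIAL_TOKENS:
        # Quote the token and pad to 8 columns before the lexer action.
        operators += '"%s"%s{ returntoken(IDENTIFIER); }\n' \
                     % (token, ' ' * (8 - len(token)))

    print(operators)
    # "pi"      { returntoken(IDENTIFIER); }
    # "oo"      { returntoken(IDENTIFIER); }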
tests/test_parser.py

@@ -2,7 +2,8 @@
 import unittest
 
 from src.parser import Parser
-from src.node import ExpressionNode as Node, ExpressionLeaf as Leaf
+from src.node import ExpressionNode as Node, ExpressionLeaf as Leaf, \
+        SPECIAL_TOKENS
 from tests.parser import ParserWrapper, run_expressions, line, graph
 from tests.rulestestcase import tree
 from src.rules.goniometry import sin, cos
@@ -89,3 +90,10 @@ class TestParser(unittest.TestCase):
         self.assertEqual(tree('log_10(x)'), log(x))
         self.assertEqual(tree('log_g(x)'), log(x, g))
         self.assertEqual(tree('log_g x'), log(x, g))
+
+    def test_special_tokens(self):
+        for token in SPECIAL_TOKENS:
+            self.assertEqual(tree(token), Leaf(token))
+            a, t = Leaf('a'), Leaf(token)
+            self.assertEqual(tree('a' + token), a * t)
+            # FIXME: self.assertEqual(tree('a' + token + 'a'), a * t * a)
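Note that the commented-out FIXME case ties back to the new TODO entry above: for token = 'pi', the expression 'a' + token + 'a' is exactly the string 'apia' that the TODO now records as being mis-parsed to 'aa'.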