parser.py

  1. """
  2. This parser will parse the given input and build an expression tree. Grammar
  3. file for the supported mathematical expressions.
  4. """
import os.path

PYBISON_BUILD = os.path.realpath('build/external/pybison')
EXTERNAL_MODS = os.path.realpath('external')

import sys
sys.path.insert(0, PYBISON_BUILD)
sys.path.insert(1, EXTERNAL_MODS)

from pybison import BisonParser, BisonSyntaxError
from graph_drawing.graph import generate_graph

from node import ExpressionNode as Node, \
        ExpressionLeaf as Leaf, OP_MAP, OP_DER, TOKEN_MAP, TYPE_OPERATOR, \
        OP_COMMA, OP_MUL, OP_POW, OP_LOG, OP_ADD, Scope, E, OP_ABS, \
        DEFAULT_LOGARITHM_BASE, OP_VALUE_MAP, SPECIAL_TOKENS, OP_INT, \
        OP_INT_INDEF, negation_to_node
from rules.utils import find_variable
from rules.precedences import IMPLICIT_RULES
from strategy import find_possibilities
from possibilities import apply_suggestion

import Queue
import re


# Check for n-ary operator in child nodes
def combine(op, op_type, *nodes):
    # At least return the operator.
    res = [op]

    for n in nodes:
        # Merge the children for all nodes which have the same operator.
        if n.type == TYPE_OPERATOR and n.op == op_type:
            res += n.nodes
        else:
            res.append(n)

    return res
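
# Illustrative example (added comment; a, b, c are hypothetical child nodes,
# and '+' is assumed to map to OP_ADD): combining an existing addition with
# another operand merges the children into one flat argument list,
#   combine('+', OP_ADD, Node('+', a, b), c)  ->  ['+', a, b, c]
# so that Node(*combine(...)) builds a single n-ary '+' node instead of a
# nested one.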


def find_integration_variable(exp):
    if not exp.is_op(OP_MUL):
        return exp, find_variable(exp)

    scope = Scope(exp)

    if len(scope) > 2 and scope[-2] == 'd' and scope[-1].is_identifier():
        x = scope[-1]
        scope.nodes = scope[:-2]

        return scope.as_nary_node(), x

    return exp, find_variable(exp)
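
# Illustrative example (added comment): for a multiplication whose last two
# factors are 'd' and an identifier, e.g. the tree for "x ^ 2 * d * x", the
# trailing "d * x" is stripped and (x ^ 2, x) is returned; any other
# expression falls back to (exp, find_variable(exp)).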


class Parser(BisonParser):
    """
    Implements the calculator parser. Grammar rules are defined in the method
    docstrings. Scanner rules are in the 'lexscript' attribute.
    """

    # Words to be ignored by the preprocessor
    words = tuple(filter(lambda w: w.isalpha(), OP_MAP.iterkeys())) \
            + ('raise', 'graph') + tuple(SPECIAL_TOKENS)

    # Output directory of generated pybison files, including a trailing slash.
    buildDirectory = PYBISON_BUILD + '/'

    # ----------------------------------------------------------------
    # lexer tokens - these must match those in your lex script (below)
    # ----------------------------------------------------------------
    # TODO: add a runtime check to verify that this token list matches the
    # list of tokens in the lex script.
    tokens = ['NUMBER', 'IDENTIFIER', 'NEWLINE', 'QUIT', 'RAISE', 'GRAPH',
              'LPAREN', 'RPAREN', 'FUNCTION', 'FUNCTION_LPAREN', 'LBRACKET',
              'RBRACKET', 'PIPE', 'PRIME', 'DERIVATIVE'] \
             + filter(lambda t: t != 'FUNCTION', TOKEN_MAP.values())

    # ------------------------------
    # precedences
    # ------------------------------
    precedences = (
        ('left', ('COMMA', )),
        ('left', ('OR', )),
        ('left', ('AND', )),
        ('left', ('EQ', )),
        ('left', ('MINUS', 'PLUS', 'NEG')),
        ('left', ('INTEGRAL', 'DERIVATIVE')),
        ('left', ('TIMES', )),
        ('left', ('DIVIDE', )),
        ('right', ('FUNCTION', )),
        ('right', ('POW', )),
        ('left', ('SUB', )),
        ('right', ('FUNCTION_LPAREN', )),
        )
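
    # Added note: as with bison precedence declarations, later entries in this
    # table bind more tightly, so for example "1 - 2 * 3" parses as
    # 1 - (2 * 3); POW is declared right-associative, so "2 ^ 3 ^ 2" parses as
    # 2 ^ (3 ^ 2).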

    interactive = 0

    def __init__(self, **kwargs):
        BisonParser.__init__(self, **kwargs)
        self.interactive = kwargs.get('interactive', 0)
        self.timeout = kwargs.get('timeout', 0)
        self.root_node = None
        self.possibilities = None

        self.reset()

    def reset(self):
        self.read_buffer = ''
        self.read_queue = Queue.Queue()

        #self.subtree_map = {}
        self.set_root_node(None)
        self.possibilities = None

    def run(self, *args, **kwargs):
        self.reset()
        return super(Parser, self).run(*args, **kwargs)

    # Override default read method with a version that prompts for input.
    def read(self, nbytes):
        if self.file == sys.stdin and self.file.closed:
            return ''

        if not self.read_buffer and not self.read_queue.empty():
            self.read_buffer = self.read_queue.get_nowait() + '\n'

        if self.read_buffer:
            read_buffer = self.read_buffer[:nbytes]
            self.read_buffer = self.read_buffer[nbytes:]
            return read_buffer

        try:
            read_buffer = raw_input('>>> ' if self.interactive else '') + '\n'
        except EOFError:
            return ''

        self.read_buffer = read_buffer[nbytes:]
        return read_buffer[:nbytes]

    def hook_read_before(self):
        pass

    def hook_read_after(self, data):
        """
        This hook is called after the read() method has returned. The data
        argument points to the data read by the read() method. This hook
        function should return the data to be used by the parser.
        """
        if not data.strip():
            return data

        # Replace known keywords with escape sequences.
        words = list(self.__class__.words)
        words.insert(10, '\n')

        for i, keyword in enumerate(words):
            # FIXME: Why case-insensitivity?
            data = re.sub(keyword, chr(i), data, flags=re.I)

        # TODO: remove this quick preprocessing hack. This hack enables
        # concatenated expressions, since the grammar currently does not
        # support those. This workaround will replace:
        #  - ")(" with ")*(".
        #  - "a(" with "a*(".
        #  - ")a" with ")*a".
        #  - "ab" with "a*b".
        #  - "4a" with "4*a".
        #  - "a4" with "a^4".
        pattern = ('(?:(\))\s*([([])'                          # )( -> ) * (
                                                               # )[ -> ) * [
                   + '|([\x00-\x09\x0b-\x19a-z0-9])\s*([([])'  # a( -> a * (
                                                               # a[ -> a * [
                   + '|(\))\s*([\x00-\x09\x0b-\x19a-z0-9])'    # )a -> ) * a
                   + '|([\x00-\x09\x0b-\x19a-z])\s*'
                   + '([\x00-\x09\x0b-\x19a-z0-9])'            # ab -> a * b
                   + '|(\|)(\|)'                               # || -> | * |
                   + '|([0-9])\s*([\x00-\x09\x0b-\x19a-z])'    # 4a -> 4 * a
                   + '|([\x00-\x09\x0b-\x19a-z])([0-9])'       # a4 -> a ^ 4
                   + '|([\x00-\x09\x0b-\x190-9])(\s+[0-9]))'   # 4 4 -> 4 * 4
                   )

        def preprocess_data(match):
            left, right = filter(None, match.groups())

            # Make sure there are no multiplication and exponentiation signs
            # inserted between a function and its argument(s): "sin x" should
            # not be written as "sin*x", because that is bogus.
            if ord(left) <= 0x9 or 0x0b <= ord(left) <= 0x19:
                return left + ' ' + right

            # If all characters on the right are numbers, e.g. "a4", the
            # expression implies exponentiation. Make sure ")4" is not
            # converted into an exponentiation, because that's multiplication.
            #if left != ')' and not left.isdigit() and right.isdigit():
            #    return '%s^%s' % (left, right)

            # match: ab | abc | abcd (where left = "a")
            return '*'.join([left] + list(re.sub(r'^ +', '', right)))

        if self.verbose:  # pragma: nocover
            data_before = data

        # Iteratively replace all matches.
        i = 0

        while i < len(data):
            data = data[:i] + re.sub(pattern, preprocess_data, data[i:])
            i += 1

        # Replace escape sequences with original keywords.
        for i, keyword in enumerate(words):
            data = data.replace(chr(i), keyword)

        # Remove TIMES operators around OR that the preprocessor put there.
        data = re.sub(r'\*?vv\*?', 'vv', data)

        # Add parentheses to integrals with a matching 'dx' so that the 'dx'
        # acts as a right parenthesis for the integral function.
        data = re.sub(r'(int(?:_.+\^.+\*)?)(.+?)(\*d\*[a-z])',
                      '\\1(\\2)\\3', data)

        if self.verbose and data_before != data:  # pragma: nocover
            print 'hook_read_after() modified the input data:'
            print 'before:', repr(data_before)
            print 'after :', repr(data)

        return data

    def hook_handler(self, target, option, names, values, retval):
        return retval

    def set_root_node(self, node):
        self.root_node = node
        self.possibilities = None

    def find_possibilities(self):
        if not self.root_node:
            raise RuntimeError('No expression')

        if self.possibilities is not None:
            if self.verbose:
                print 'Expression has not changed, not updating possibilities'

            return

        self.possibilities = find_possibilities(self.root_node)

    def display_hint(self):
        self.find_possibilities()

        if self.interactive:
            if self.possibilities:
                print self.possibilities[0]
            else:
                print 'No further reduction is possible.'

    def display_possibilities(self):
        self.find_possibilities()

        for i, p in enumerate(self.possibilities):
            print '%d %s' % (i, p)

    def rewrite(self, index=0, verbose=False, check_implicit=True):
        self.find_possibilities()

        if not self.possibilities:
            return

        suggestion = self.possibilities[index]

        if self.verbose:
            print 'EXPLICIT:', suggestion
        elif verbose:
            print suggestion

        self.set_root_node(apply_suggestion(self.root_node, suggestion))

        if self.verbose:
            print ' ', self.root_node

        # Only apply any remaining implicit hints if the suggestion itself is
        # not implicit.
        if check_implicit and suggestion.handler not in IMPLICIT_RULES:
            self.find_possibilities()

            while self.possibilities:
                # Find the first implicit possibility in the list.
                # FIXME: Is it smart to apply a rule that is not a hint?
                sugg = None

                for pos in self.possibilities:
                    if pos.handler in IMPLICIT_RULES:
                        sugg = pos
                        break

                if not sugg:
                    break

                if self.verbose:
                    print 'IMPLICIT:', sugg

                self.set_root_node(apply_suggestion(self.root_node, sugg))

                if self.verbose:
                    print ' ', self.root_node

                self.find_possibilities()

        if verbose and not self.verbose:
            print self.root_node

        return self.root_node
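
    # Added note: rewrite() applies one explicit suggestion and then, unless
    # that suggestion was itself implicit, keeps applying the first implicit
    # possibility it finds until only explicit hints remain.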

    def rewrite_all(self, verbose=False):
        i = 0

        while self.rewrite(verbose=verbose):
            i += 1

            if i > 100:
                print 'Too many rewrite steps, aborting...'
                break

        if not verbose or not i:
            return self.root_node

    #def hook_run(self, filename, retval):
    #    return retval

    # ---------------------------------------------------------------
    # These methods are the python handlers for the bison targets.
    # (which get called by the bison code each time the corresponding
    # parse target is unambiguously reached)
    #
    # WARNING - don't touch the method docstrings unless you know what
    # you are doing - they are in bison rule syntax, and are passed
    # verbatim to bison to build the parser engine library.
    # ---------------------------------------------------------------

    # Declare the start target here (by name)
    start = 'input'

    def on_input(self, target, option, names, values):
        """
        input :
              | input line
        """
        if option == 1:
            # Interactive mode is enabled if the term rewriting system is used
            # as a shell. In that case, it is useful that the shell prints the
            # output of the evaluation.
            if self.interactive and values[1]:  # pragma: nocover
                print values[1]

            return values[1]

    def on_line(self, target, option, names, values):
        """
        line : NEWLINE
             | exp NEWLINE
             | debug NEWLINE
             | HINT NEWLINE
             | POSSIBILITIES NEWLINE
             | REWRITE NEWLINE
             | REWRITE NUMBER NEWLINE
             | REWRITE_ALL NEWLINE
             | REWRITE_ALL_VERBOSE NEWLINE
             | RAISE NEWLINE
        """
        if option in (1, 2):  # rule: {exp,debug} NEWLINE
            self.set_root_node(values[0])
            return values[0]

        if option == 3:  # rule: HINT NEWLINE
            self.display_hint()
            return

        if option == 4:  # rule: POSSIBILITIES NEWLINE
            self.display_possibilities()
            return

        if option == 5:  # rule: REWRITE NEWLINE
            return self.rewrite()

        if option == 6:  # rule: REWRITE NUMBER NEWLINE
            self.rewrite(int(values[1]))
            return self.root_node

        if option in (7, 8):  # rule: REWRITE_ALL NEWLINE
            return self.rewrite_all(verbose=(option == 8))

        if option == 9:
            raise RuntimeError('on_line: exception raised')

    def on_debug(self, target, option, names, values):
        """
        debug : GRAPH exp
        """
        if option == 0:
            print generate_graph(negation_to_node(values[1]))
            return values[1]

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    def on_exp(self, target, option, names, values):
        """
        exp : NUMBER
            | IDENTIFIER
            | LPAREN exp RPAREN
            | unary
            | binary
            | nary
        """
        #   | concat
        if option == 0:  # rule: NUMBER
            # TODO: A bit hacky, this achieves long integers and floats.
            value = float(values[0]) if '.' in values[0] else int(values[0])
            return Leaf(value)

        if option == 1:  # rule: IDENTIFIER
            return Leaf(values[0])

        if option == 2:  # rule: LPAREN exp RPAREN
            return values[1]

        if 3 <= option <= 5:  # rule: unary | binary | nary
            return values[0]

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    def on_unary(self, target, option, names, values):
        """
        unary : MINUS exp
              | FUNCTION_LPAREN exp RPAREN
              | FUNCTION exp
              | DERIVATIVE exp
              | bracket_derivative
              | INTEGRAL exp
              | integral_bounds TIMES exp %prec INTEGRAL
              | LBRACKET exp RBRACKET lbnd ubnd
              | PIPE exp PIPE
        """
        if option == 0:  # rule: MINUS exp
            values[1].negated += 1
            return values[1]

        if option in (1, 2):  # rule: FUNCTION_LPAREN exp RPAREN | FUNCTION exp
            op = values[0].split(' ', 1)[0]

            if op == 'ln':
                return Node(OP_LOG, values[1], Leaf(E))

            if values[1].is_op(OP_COMMA):
                return Node(op, *values[1])

            if op == OP_VALUE_MAP[OP_LOG]:
                return Node(OP_LOG, values[1], Leaf(DEFAULT_LOGARITHM_BASE))

            m = re.match(r'^log_([0-9]+|[a-zA-Z])', op)

            if m:
                value = m.group(1)

                if value.isdigit():
                    value = int(value)

                return Node(OP_LOG, values[1], Leaf(value))

            return Node(op, values[1])

        if option == 3:  # rule: DERIVATIVE exp
            # DERIVATIVE looks like 'd/d*x*' -> extract the 'x'.
            return Node(OP_DER, values[1], Leaf(values[0][-2]))

        if option == 4:  # rule: bracket_derivative
            return values[0]

        if option == 5:  # rule: INTEGRAL exp
            fx, x = find_integration_variable(values[1])
            return Node(OP_INT, fx, x)

        if option == 6:  # rule: integral_bounds TIMES exp
            lbnd, ubnd = values[0]
            fx, x = find_integration_variable(values[2])
            return Node(OP_INT, fx, x, lbnd, ubnd)

        if option == 7:  # rule: LBRACKET exp RBRACKET lbnd ubnd
            return Node(OP_INT_INDEF, values[1], values[3], values[4])

        if option == 8:  # rule: PIPE exp PIPE
            return Node(OP_ABS, values[1])

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover
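
    # Illustrative examples for the FUNCTION rules above (added comment;
    # assumes OP_VALUE_MAP[OP_LOG] is the literal 'log'):
    #   "ln x"    -> Node(OP_LOG, x, Leaf(E))
    #   "log x"   -> Node(OP_LOG, x, Leaf(DEFAULT_LOGARITHM_BASE))
    #   "log_2 x" -> Node(OP_LOG, x, Leaf(2))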

    def on_integral_bounds(self, target, option, names, values):
        """
        integral_bounds : INTEGRAL lbnd ubnd
        """
        if option == 0:  # rule: INTEGRAL lbnd ubnd
            return values[1], values[2]

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    def on_lbnd(self, target, option, names, values):
        """
        lbnd : SUB exp
        """
        if option == 0:  # rule: SUB exp
            return values[1]

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    def on_ubnd(self, target, option, names, values):
        """
        ubnd : POW exp
        """
        if option == 0:  # rule: POW exp
            return values[1]

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    def on_power(self, target, option, names, values):
        """
        power : exp POW exp
        """
        if option == 0:  # rule: exp POW exp
            return values[0], values[2]

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    def on_bracket_derivative(self, target, option, names, values):
        """
        bracket_derivative : LBRACKET exp RBRACKET PRIME
                           | bracket_derivative PRIME
        """
        if option == 0:  # rule: LBRACKET exp RBRACKET PRIME
            return Node(OP_DER, values[1])

        if option == 1:  # rule: bracket_derivative PRIME
            return Node(OP_DER, values[0])

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    def on_binary(self, target, option, names, values):
        """
        binary : exp PLUS exp
               | exp TIMES exp
               | exp DIVIDE exp
               | exp EQ exp
               | exp AND exp
               | exp OR exp
               | exp MINUS exp
               | power
        """
        if 0 <= option <= 5:  # rule: exp {PLUS,TIMES,DIVIDE,EQ,AND,OR} exp
            return Node(values[1], values[0], values[2])

        if option == 6:  # rule: exp MINUS exp
            right = values[2]
            right.negated += 1

            # Explicitly call the hook handler on the created unary negation.
            self.hook_handler('unary', 0, names, values, right)

            return Node(OP_ADD, values[0], right)

        if option == 7:  # rule: power
            return Node(OP_POW, *values[0])

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover
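
    # Added note: subtraction has no operator node of its own; the MINUS rule
    # above negates the right operand and emits an OP_ADD node, so "a - b" is
    # represented as a + (-b).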

    def on_nary(self, target, option, names, values):
        """
        nary : exp COMMA exp
        """
        if option == 0:  # rule: exp COMMA exp
            return Node(*combine(',', OP_COMMA, values[0], values[2]))

        raise BisonSyntaxError('Unsupported option %d in target "%s".'
                               % (option, target))  # pragma: nocover

    # -----------------------------------------
    # Special tokens and operator tokens
    # -----------------------------------------
    operators = ''
    functions = []

    for token in SPECIAL_TOKENS:
        if len(token) > 1:
            operators += '"%s"%s{ returntoken(IDENTIFIER); }\n' \
                         % (token, ' ' * (8 - len(token)))

    for op_str, op in OP_MAP.iteritems():
        if TOKEN_MAP[op] == 'FUNCTION':
            functions.append(op_str)
        else:
            operators += '"%s"%s{ returntoken(%s); }\n' \
                         % (op_str, ' ' * (8 - len(op_str)), TOKEN_MAP[op])

    # Put all functions in a single regex.
    if functions:
        operators += '("%s")[ ]*"(" { returntoken(FUNCTION_LPAREN); }\n' \
                     % '"|"'.join(functions)
        operators += '("%s") { returntoken(FUNCTION); }\n' \
                     % '"|"'.join(functions)

    # -----------------------------------------
    # raw lex script, verbatim here
    # -----------------------------------------
    lexscript = r"""
%top{
#include "Python.h"
}
%{
#define YYSTYPE void *
#include "tokens.h"
extern void *py_parser;
extern void (*py_input)(PyObject *parser, char *buf, int *result,
                        int max_size);
#define returntoken(tok) \
        yylval = PyString_FromString(strdup(yytext)); return (tok);
#define YY_INPUT(buf,result,max_size) { \
        (*py_input)(py_parser, buf, &result, max_size); \
}

int yycolumn = 0;

#define YY_USER_ACTION \
        yylloc.first_line = yylloc.last_line = yylineno; \
        yylloc.first_column = yycolumn; \
        yylloc.last_column = yycolumn + yyleng; \
        yycolumn += yyleng;
%}

%option yylineno

%%

d[ ]*"/"[ ]*"d*"[a-z]"*"    { returntoken(DERIVATIVE); }
[0-9]+"."?[0-9]*            { returntoken(NUMBER); }
[a-zA-Z]                    { returntoken(IDENTIFIER); }
"("                         { returntoken(LPAREN); }
")"                         { returntoken(RPAREN); }
"["                         { returntoken(LBRACKET); }
"]"                         { returntoken(RBRACKET); }
"'"                         { returntoken(PRIME); }
"|"                         { returntoken(PIPE); }
log_([0-9]+|[a-zA-Z])"*("   { returntoken(FUNCTION_LPAREN); }
log_([0-9]+|[a-zA-Z])"*"    { returntoken(FUNCTION); }
""" + operators + r"""
"raise"                     { returntoken(RAISE); }
"graph"                     { returntoken(GRAPH); }
"quit"                      { yyterminate(); returntoken(QUIT); }
[ \t\v\f]                   { }
[\n]                        { yycolumn = 0; returntoken(NEWLINE); }
.                           { printf("unknown char %c ignored.\n", yytext[0]); }
%%

yywrap() { return(1); }
"""
    #int[ ]*"(" { returntoken(FUNCTION_LPAREN); }