Commit e6324624 authored by Taddeus Kroes

Added global optimization and dataflow analysis to the optimization loop.

parent ae2cc948
@@ -11,23 +11,27 @@ def optimize(program, verbose=0):
     # Remember original number of statements
     o = program.count_instructions()
 
-    # Optimize on a global level
-    program.optimize_global()
-    g = program.count_instructions()
-
     changed = True
 
+    # Perform dataflow analysis
+    program.perform_dataflow_analysis()
+
     while changed:
         changed = False
 
-        # Optimize basic blocks
-        program.optimize_blocks()
+        # Optimize on a global level
+        if program.optimize_global():
+            changed = True
 
-    # Concatenate optimized blocks to obtain
+        # Perform dataflow analysis on new blocks
+        program.perform_dataflow_analysis()
+
+        # Optimize basic blocks
+        if program.optimize_blocks():
+            changed = True
 
     # Count number of instructions after optimization
     b = program.count_instructions()
 
     # Print results
     if verbose:
         print 'Original statements: %d' % o
-        print 'After global optimization: %d (%d removed)' % (g, o - g)
-        print 'After basic block optimization: %d (%d removed)' % (b, g - b)
         print 'Statements removed: %d (%d%%)' \
                 % (o - b, int((o - b) / float(b) * 100))
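Taken together, the hunk turns the driver into a fixpoint loop: both optimization levels now report whether they changed anything, and the loop repeats them, re-running the dataflow analysis in between, until neither reports a change. Below is a minimal sketch of that pattern, assuming only what the diff shows (optimize_global() and optimize_blocks() return True when they modified the program) and leaving out the verbose reporting from the real function:

def optimize(program):
    # Number of statements before any optimization.
    before = program.count_instructions()

    # Build the dataflow information the optimization passes rely on.
    program.perform_dataflow_analysis()

    changed = True

    while changed:
        changed = False

        # Global (whole-program) pass; True means something was rewritten.
        if program.optimize_global():
            changed = True

        # The code may have changed, so recompute the dataflow information
        # before running the block-level passes.
        program.perform_dataflow_analysis()

        # Per-basic-block passes.
        if program.optimize_blocks():
            changed = True

    # Report how many statements the whole loop removed.
    return before - program.count_instructions()

The loop only terminates if each pass reports True strictly when it made progress; a pass that keeps reporting changes without converging would make it spin forever.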
@@ -62,7 +62,10 @@ class Program(Block):
     def optimize_global(self):
         """Optimize on a global level."""
-        remove_redundant_jumps(self)
+        if not hasattr(self, 'statements'):
+            self.statements = self.get_statements()
+
+        return remove_redundant_jumps(self)
 
     def optimize_blocks(self):
         """Optimize on block level. Keep executing all optimizations until no
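Caching self.statements behind the hasattr check means the flattened statement list is built only once, the first time optimize_global() runs, rather than on every iteration of the loop above. remove_redundant_jumps itself is not part of this commit; the new return statement only suggests that it reports whether it removed anything, so its result can drive the loop. Purely as an illustration, using a hypothetical Jump/Label statement representation (not the project's real classes) and assuming the pass reads program.statements, such a change-reporting pass could look like:

class Label(object):
    """Hypothetical jump target."""
    def __init__(self, name):
        self.name = name


class Jump(object):
    """Hypothetical unconditional jump to a named label."""
    def __init__(self, label):
        self.label = label


def remove_redundant_jumps(program):
    """Drop jumps whose target label is the very next statement; return True
    when at least one jump was removed."""
    statements = program.statements
    kept = []
    changed = False

    for i, statement in enumerate(statements):
        following = statements[i + 1] if i + 1 < len(statements) else None

        if isinstance(statement, Jump) and isinstance(following, Label) \
                and statement.label == following.name:
            # Control falls through to the label anyway, so skip the jump.
            changed = True
        else:
            kept.append(statement)

    program.statements = kept

    return changed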