Commit e6324624 authored by Taddeus Kroes

Added global optimization and dataflow analysis to optimization loop.

parent ae2cc948
@@ -11,23 +11,27 @@ def optimize(program, verbose=0):
     # Remember original number of statements
     o = program.count_instructions()
 
-    # Optimize on a global level
-    program.optimize_global()
-    g = program.count_instructions()
-
-    # Perform dataflow analysis
-    program.perform_dataflow_analysis()
-
-    # Optimize basic blocks
-    program.optimize_blocks()
-
-    # Concatenate optimized blocks to obtain
+    changed = True
+
+    while changed:
+        changed = False
+
+        # Optimize on a global level
+        if program.optimize_global():
+            changed = True
+
+        # Perform dataflow analysis on new blocks
+        program.perform_dataflow_analysis()
+
+        # Optimize basic blocks
+        if program.optimize_blocks():
+            changed = True
+
+    # Count number of instructions after optimization
     b = program.count_instructions()
 
     # Print results
     if verbose:
         print 'Original statements: %d' % o
-        print 'After global optimization: %d (%d removed)' % (g, o - g)
-        print 'After basic block optimization: %d (%d removed)' % (b, g - b)
         print 'Statements removed: %d (%d%%)' \
                 % (o - b, int((o - b) / float(b) * 100))
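The new control flow is a fixed-point iteration: every pass reports whether it changed anything, and the loop repeats until a full round leaves the program untouched. A minimal standalone sketch of the pattern (the `optimize_to_fixed_point` helper and its `passes` parameter are illustrative, not this repository's actual API):

```python
def optimize_to_fixed_point(program, passes):
    """Repeatedly run `passes` over `program` until none reports a change.

    Each pass is a callable that mutates `program` and returns True if it
    changed anything, mirroring the loop above where optimize_global() and
    optimize_blocks() feed their results into a shared `changed` flag.
    """
    changed = True

    while changed:
        changed = False

        for run_pass in passes:
            # `|=` keeps the flag set once any pass reports a change
            changed |= bool(run_pass(program))
```

Termination relies on every pass only ever shrinking or simplifying the program; a pass that oscillates between two equivalent forms would loop forever, which is why each pass must return False once it has nothing left to do.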
@@ -62,7 +62,10 @@ class Program(Block):
     def optimize_global(self):
         """Optimize on a global level."""
-        remove_redundant_jumps(self)
+        if not hasattr(self, 'statements'):
+            self.statements = self.get_statements()
+
+        return remove_redundant_jumps(self)
 
     def optimize_blocks(self):
         """Optimize on block level. Keep executing all optimizations until no
...
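The change to `optimize_global` makes the method fit that protocol: it lazily caches the flat statement list on first use and forwards the boolean result of `remove_redundant_jumps` to the caller. As a hedged sketch of what such a changed-flag pass could look like (the `is_jump`, `target`, `is_label`, and `name` accessors are assumptions about the statement objects, not the project's real interface):

```python
def remove_redundant_jumps(block):
    """Remove jumps whose target is the label directly following them.

    Returns True if any statement was removed, so the caller can feed the
    result into the fixed-point loop. The is_jump/target/is_label/name
    accessors are hypothetical stand-ins for the real statement API.
    """
    changed = False
    statements = block.statements
    i = 0

    while i < len(statements) - 1:
        current, following = statements[i], statements[i + 1]

        if current.is_jump() and following.is_label() \
                and current.target() == following.name():
            # Jumping to the immediately following label is a no-op
            del statements[i]
            changed = True
        else:
            i += 1

    return changed
```

Returning the flag instead of None is what allows the `if program.optimize_global():` test in the loop above to work.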