[Python-checkins] r84042 - in sandbox/trunk/2to3/lib2to3: btm_matcher.py btm_utils.py fixer_base.py fixes/fix_apply.py fixes/fix_basestring.py fixes/fix_buffer.py fixes/fix_callable.py fixes/fix_dict.py fixes/fix_except.py fixes/fix_exec.py fixes/fix_execfile.py fixes/fix_exitfunc.py fixes/fix_filter.py fixes/fix_funcattrs.py fixes/fix_future.py fixes/fix_getcwdu.py fixes/fix_has_key.py fixes/fix_idioms.py fixes/fix_import.py fixes/fix_imports.py fixes/fix_input.py fixes/fix_intern.py fixes/fix_isinstance.py fixes/fix_itertools.py fixes/fix_itertools_imports.py fixes/fix_long.py fixes/fix_map.py fixes/fix_metaclass.py fixes/fix_methodattrs.py fixes/fix_next.py fixes/fix_nonzero.py fixes/fix_operator.py fixes/fix_paren.py fixes/fix_print.py fixes/fix_raise.py fixes/fix_raw_input.py fixes/fix_reduce.py fixes/fix_renames.py fixes/fix_repr.py fixes/fix_set_literal.py fixes/fix_standarderror.py fixes/fix_sys_exc.py fixes/fix_throw.py fixes/fix_tuple_params.py fixes/fix_types.py fixes/fix_unicode.py fixes/fix_xrange.py fixes/fix_xreadlines.py fixes/fix_zip.py patcomp.py pygram.py pytree.py refactor.py

george.boutsioukis python-checkins at python.org
Sat Aug 14 23:10:20 CEST 2010


Author: george.boutsioukis
Date: Sat Aug 14 23:10:19 2010
New Revision: 84042

Log:
This revision incorporates into the 2to3 tool the new, faster tree matching algorithm developed during a GSoC project. The algorithm resides in the two added modules, btm_matcher and btm_utils. New code has been added to refactor.py to drive the new matching process, and a few minor changes were made in other modules. A BM_compatible flag (False by default) has been added to fixer_base; it is set to True in most of the current fixers.

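For illustration, opting a fixer into the new matcher only requires the class attributes introduced in fixer_base below; a minimal sketch using a hypothetical FixExample that is not part of this commit:

    from lib2to3 import fixer_base
    from lib2to3.fixer_util import Name

    class FixExample(fixer_base.BaseFix):
        BM_compatible = True     # opt in to the bottom-up matcher (default: False)
        keep_line_order = False  # set True if matches must be applied in line order

        PATTERN = "'example'"

        def transform(self, node, results):
            # Replace the matched name, preserving its prefix (whitespace/comments).
            return Name(u"example_replacement", prefix=node.prefix)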

Added:
   sandbox/trunk/2to3/lib2to3/btm_matcher.py
   sandbox/trunk/2to3/lib2to3/btm_utils.py
Modified:
   sandbox/trunk/2to3/lib2to3/fixer_base.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_apply.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_basestring.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_buffer.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_callable.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_dict.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_except.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_exec.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_execfile.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_exitfunc.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_filter.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_funcattrs.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_future.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_getcwdu.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_has_key.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_idioms.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_import.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_input.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_intern.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_isinstance.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_itertools.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_itertools_imports.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_long.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_map.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_metaclass.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_methodattrs.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_next.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_nonzero.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_operator.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_paren.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_print.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_raise.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_raw_input.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_reduce.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_renames.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_repr.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_set_literal.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_standarderror.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_sys_exc.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_throw.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_tuple_params.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_types.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_unicode.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_xrange.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_xreadlines.py
   sandbox/trunk/2to3/lib2to3/fixes/fix_zip.py
   sandbox/trunk/2to3/lib2to3/patcomp.py
   sandbox/trunk/2to3/lib2to3/pygram.py
   sandbox/trunk/2to3/lib2to3/pytree.py
   sandbox/trunk/2to3/lib2to3/refactor.py

Added: sandbox/trunk/2to3/lib2to3/btm_matcher.py
==============================================================================
--- (empty file)
+++ sandbox/trunk/2to3/lib2to3/btm_matcher.py	Sat Aug 14 23:10:19 2010
@@ -0,0 +1,170 @@
+"""A bottom-up tree matching algorithm implementation meant to speed
+up 2to3's matching process. After the tree patterns are reduced to
+their rarest linear path, a linear Aho-Corasick automaton is
+created. The linear automaton traverses the linear paths from the
+leaves to the root of the AST and returns a set of nodes for further
+matching. This reduces significantly the number of candidate nodes."""
+
+__author__ = "George Boutsioukis <gboutsioukis at gmail.com>"
+
+import logging
+from .btm_utils import *
+
+class BMNode(object):
+    """Class for a node of the Aho-Corasick automaton used in matching"""
+    last_id = 0
+    def __init__(self):
+        self.transition_table = {}
+        self.fixers = []
+        self.id = BMNode.new_id()
+        self.content = ''
+
+    @classmethod
+    def new_id(cls):
+        new_id = cls.last_id
+        cls.last_id += 1
+        return new_id
+    
+class BottomMatcher(object):
+    """The main matcher class. After instantiating the patterns should
+    be added using the add_fixer method"""
+            
+    def __init__(self):
+        self.match = set()
+        self.root = BMNode()
+        self.nodes = [self.root]
+        self.fixers = []
+        self.logger = logging.getLogger("RefactoringTool")
+
+    def add_fixer(self, fixer):
+        """Reduces a fixer's pattern tree to a linear path and adds it
+        to the matcher(a common Aho-Corasick automaton). The fixer is
+        appended on the matching states and called when they are
+        reached"""
+        self.fixers.append(fixer)
+        tree = reduce_tree(fixer.pattern_tree)
+        linear = tree.get_linear_subpattern()
+        match_nodes = self.add(linear, start=self.root)
+        for match_node in match_nodes:
+            match_node.fixers.append(fixer)
+        
+    def add(self, pattern, start):
+        "Recursively adds a linear pattern to the AC automaton"
+        #print("adding pattern", pattern, "to", start)
+        if not pattern:
+            #print("empty pattern")
+            return [start]
+        if type(pattern[0]) is tuple:
+            #alternatives
+            #print("alternatives")
+            match_nodes = []
+            for alternative in pattern[0]:
+                #add all alternatives, and add the rest of the pattern
+                #to each end node
+                end_nodes = self.add(alternative, start=start)
+                for end in end_nodes:
+                    match_nodes.extend(self.add(pattern[1:], end))
+            return match_nodes
+        else:
+            #single token
+                #not last
+                if pattern[0] not in start.transition_table.keys():
+                    #transition did not exist, create new
+                    next_node = BMNode()
+                    start.transition_table[pattern[0]] = next_node
+                else:
+                    #transition exists already, follow
+                    next_node = start.transition_table[pattern[0]]
+                    
+                if pattern[1:]:
+                    end_nodes = self.add(pattern[1:], start=next_node)
+                else:
+                    end_nodes = [next_node]
+                return end_nodes
+
+    def run(self, leaves):
+        """The main interface with the bottom matcher. The tree is
+        traversed from the bottom using the constructed
+        automaton. Nodes are only checked once as the tree is
+        retraversed. When the automaton fails, we give it one more
+        shot(in case the above tree matches as a whole with the
+        rejected leaf), then we break for the next leaf. There is the
+        special case of multiple arguments(see code comments) where we
+        recheck the nodes
+
+        Args:
+           The leaves of the AST tree to be matched
+           
+        Returns:
+           A dictionary of node matches with fixers as the keys
+        """
+        current_ac_node = self.root
+        results = {}
+        for leaf in leaves:
+            current_ast_node = leaf
+            while(current_ast_node):
+                current_ast_node.was_checked = True
+                for child in current_ast_node.children:
+                    # multiple statements, recheck
+                    if hasattr(child, "value") and child.value==';':
+                        current_ast_node.was_checked = False
+                        break
+                if current_ast_node.type == 1:
+                    #name
+                    node_token = current_ast_node.value
+                else:
+                    node_token = current_ast_node.type
+
+                if node_token in current_ac_node.transition_table.keys():
+                    #token matches
+                    current_ac_node = current_ac_node.transition_table[node_token]
+                    for fixer in current_ac_node.fixers:
+                        if not fixer in results.keys():
+                            results[fixer] = []
+                        results[fixer].append(current_ast_node)
+
+                else:
+                    #matching failed, reset automaton
+                    current_ac_node = self.root
+                    if current_ast_node.parent is not None \
+                           and current_ast_node.parent.was_checked:
+                        #the rest of the tree upwards has been checked, next leaf
+                        break
+
+                    #recheck the rejected node once from the root
+                    if node_token in current_ac_node.transition_table.keys():
+                        #token matches
+                        current_ac_node = current_ac_node.transition_table[node_token]
+                        for fixer in current_ac_node.fixers:
+                            if not fixer in results.keys():
+                                results[fixer] = []
+                            results[fixer].append(current_ast_node)
+
+                current_ast_node = current_ast_node.parent
+        return results
+    
+    def print_ac(self):
+        "Prints a graphviz diagram of the BM automaton(for debugging)"
+        print("digraph g{")
+        def print_node(node):
+            for subnode_key in node.transition_table.keys():
+                subnode = node.transition_table[subnode_key]
+                print("%d -> %d [label=%s] //%s" %
+                      (node.id, subnode.id, type_repr(subnode_key), str(subnode.fixers)))
+                if subnode_key == 1:
+                    print(subnode.content)
+                print_node(subnode)
+        print_node(self.root)
+        print("}")
+    
+# taken from pytree.py for debugging; only used by print_ac
+_type_reprs = {}
+def type_repr(type_num):
+    global _type_reprs
+    if not _type_reprs:
+        from .pygram import python_symbols
+        # printing tokens is possible but not as useful
+        # from .pgen2 import token // token.__dict__.items():
+        for name, val in python_symbols.__dict__.items():
+            if type(val) == int: _type_reprs[val] = name
+    return _type_reprs.setdefault(type_num, type_num)

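A minimal sketch of how the matcher is meant to be driven (refactor.py below does essentially this); the fixers list and the parse tree are assumed to exist already:

    from lib2to3 import btm_matcher as bm

    matcher = bm.BottomMatcher()
    for fixer in fixers:              # fixers that set BM_compatible = True
        matcher.add_fixer(fixer)      # each pattern is reduced to a linear path

    # Feed the AST leaves to the automaton and get candidate nodes per fixer.
    matches = matcher.run(tree.leaves())
    for fixer, nodes in matches.items():
        for node in nodes:
            results = fixer.match(node)   # confirm with the full pattern
            if results:
                fixer.transform(node, results)
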
Added: sandbox/trunk/2to3/lib2to3/btm_utils.py
==============================================================================
--- (empty file)
+++ sandbox/trunk/2to3/lib2to3/btm_utils.py	Sat Aug 14 23:10:19 2010
@@ -0,0 +1,287 @@
+"Utility functions used by the btm_matcher module"
+
+from . import pytree
+from .pgen2 import grammar, token
+from .pygram import pattern_symbols, python_symbols
+
+syms = pattern_symbols.__dict__
+pysyms = python_symbols.__dict__
+tokens = grammar.opmap
+token_labels = token.__dict__
+
+TYPE_ANY = -1
+TYPE_ALTERNATIVES = -2
+TYPE_GROUP = -3
+
+class MinNode(object):
+    """This class serves as an intermediate representation of the
+    pattern tree during the conversion to sets of leaf-to-root
+    subpatterns"""
+    
+    def __init__(self, type=None, name=None, times=1):
+        self.type = type
+        self.name = name
+        self.times = times
+        self.children = []
+        self.leaf = False
+        self.parent = None
+        self.alternatives = []
+        self.alternatives_number = None
+        self.group = []
+
+    def __repr__(self):
+        return str(self.type) + ' ' + str(self.name) + ' ' + str(self.times)
+
+    def leaf_to_root(self):
+        """Internal method. Returns a characteristic path of the
+        pattern tree. This method must be run for all leaves until the
+        linear subpatterns are merged into a single"""
+        node = self
+        subp = []
+        while node:
+            if node.type == TYPE_ALTERNATIVES:
+                node.alternatives.append(subp)
+                if len(node.alternatives) == len(node.children):
+                    #last alternative
+                    subp = [tuple(node.alternatives)]
+                    node.alternatives = []
+                    node = node.parent
+                    continue
+                else:
+                    node = node.parent
+                    subp = None
+                    break
+                
+            if node.type == TYPE_GROUP:
+                node.group.append(subp)
+                #probably should check the number of leaves
+                if len(node.group) == len(node.children):
+                    subp = get_characteristic_subpattern(node.group)
+                    node.group = []
+                    node = node.parent
+                    continue
+                else:
+                    node = node.parent
+                    subp = None
+                    break
+
+            if node.type == token_labels['NAME'] and node.name:
+                #in case of type=name, use the name instead
+                subp.append(node.name)
+            else:
+                subp.append(node.type)
+                
+            node = node.parent
+        return subp
+
+    def get_linear_subpattern(self):
+        """Drives the leaf_to_root method. The reason that
+        leaf_to_root must be run multiple times is because we need to
+        reject 'group' matches; for example the alternative form
+        (a | b c) creates a group [b c] that needs to be matched. Since
+        matching multiple linear patterns overcomes the automaton's
+        capabilities, leaf_to_root merges each group into a single
+        choice based on 'characteristic'ity,
+        
+        i.e. (a|b c) -> (a|b) if b more characteristic than c
+
+        Returns: The most 'characteristic'(as defined by
+          get_characteristic_subpattern) path for the compiled pattern
+          tree.
+        """
+        
+        for l in self.leaves():
+            subp = l.leaf_to_root()
+            if subp:
+                return subp
+          
+    def leaves(self):
+        "Generator that returns the leaves of the tree"
+        for child in self.children:
+            for x in child.leaves():
+                yield x
+        if not self.children:
+            yield self
+
+def reduce_tree(node, parent=None):
+    """
+    Internal function. Reduces a compiled pattern tree to an
+    intermediate representation suitable for feeding the
+    automaton. This also trims off any optional pattern elements(like
+    [a], a*).
+    """
+    
+    new_node = None
+    #switch on the node type
+    if node.type == syms['Matcher']:
+        
+        #skip
+        new_node = reduce_tree(node.children[0])
+
+    elif node.type == syms['Alternatives']:
+        #2 cases
+        if len(node.children)<=2:
+            #just a single 'Alternative', skip this node
+            new_node = reduce_tree(node.children[0], parent)
+        elif len(node.children)>2:
+            #real alternatives
+            new_node = MinNode(type=TYPE_ALTERNATIVES)
+            #skip odd children('|' tokens)
+            for child in node.children:
+                if node.children.index(child)%2:
+                    continue
+                reduced = reduce_tree(child, new_node)
+                if reduced is not None:
+                    new_node.children.append(reduced)
+        else:
+            raise Exception
+    elif node.type == syms['Alternative']:
+        if len(node.children)>1:
+            
+            new_node = MinNode(type=TYPE_GROUP)
+            for child in node.children:
+                reduced = reduce_tree(child, new_node)
+                if reduced:
+                    new_node.children.append(reduced)
+            if not new_node.children:
+                # delete the group if all of the children were reduced to None
+                new_node = None
+                
+        else:
+            new_node = reduce_tree(node.children[0], parent)
+
+    elif node.type == syms['Unit']:
+        if hasattr(node.children[0], "value") and \
+               node.children[0].value == '(':
+            #skip parentheses
+            return reduce_tree(node.children[1], parent)
+        if (hasattr(node.children[0], "value") and \
+               node.children[0].value == '[') \
+               or \
+               (len(node.children)>1 and \
+               hasattr(node.children[1], "value") and \
+               node.children[1].value == '['):
+            #skip whole unit if its optional
+            return None
+            
+        leaf = True
+        details_node = None
+        alternatives_node = None
+        has_repeater = False
+        repeater_node = None
+        has_variable_name = False
+
+        for child in node.children:
+            if child.type == syms['Details']:
+                leaf = False
+                details_node = child
+            elif child.type == syms['Repeater']:
+                has_repeater = True
+                repeater_node = child
+            elif child.type == syms['Alternatives']:
+                alternatives_node = child
+            if hasattr(child, 'value') and child.value == '=': # variable name
+                has_variable_name = True
+
+        #skip variable name
+        if has_variable_name:
+            #skip variable name, '='
+            name_leaf = node.children[2]
+            if hasattr(name_leaf, 'value') and name_leaf.value == '(':
+                # skip parenthesis
+                name_leaf = node.children[3]
+        else:
+            name_leaf = node.children[0]
+
+        #set node type
+        if name_leaf.type == token_labels['NAME']:
+            #(python) non-name or wildcard
+            if name_leaf.value == 'any':
+                new_node = MinNode(type=TYPE_ANY)
+            else:
+                if name_leaf.value in token_labels:
+                    new_node = MinNode(type=token_labels[name_leaf.value])
+                else:
+                    new_node = MinNode(type=pysyms[name_leaf.value])
+                    
+        elif name_leaf.type == token_labels['STRING']:
+            #(python) name or character; remove the apostrophes from
+            #the string value
+            name = name_leaf.value[1:][:-1]
+            if name in tokens:
+                new_node = MinNode(type=tokens[name])
+            else:
+                new_node = MinNode(type=token_labels['NAME'], name=name)
+        elif name_leaf.type == syms['Alternatives']:
+            new_node = reduce_tree(alternatives_node, parent)
+
+        #handle repeaters
+        if has_repeater:
+            if repeater_node.children[0].value == '*':
+                #reduce to None
+                new_node = None
+            elif repeater_node.children[0].value == '+':
+                #reduce to a single occurence i.e. do nothing
+                pass
+            else:
+                #TODO: handle {min, max} repeaters
+                pass
+
+        #add children
+        if details_node and new_node is not None:
+            for child in details_node.children[1:][:-1]:
+                #skip '<', '>' markers
+                reduced = reduce_tree(child, new_node)
+                if reduced is not None:
+                    new_node.children.append(reduced)
+    if new_node:
+        new_node.parent = parent
+    return new_node
+
+
+def get_characteristic_subpattern(subpatterns):
+    """Picks the most characteristic from a list of linear patterns
+    Current order used is:
+    names > common_names > common_chars
+    """
+    if type(subpatterns) is not list:
+        return subpatterns
+    if type(subpatterns) is list and len(subpatterns)==1:
+        return subpatterns[0]
+
+    # first pick out the ones containing variable names
+    subpatterns_with_names = []
+    subpatterns_with_common_names = []
+    common_names = ['in', 'for', 'if' , 'not', 'None']
+    subpatterns_with_common_chars = []
+    common_chars = "[]().,:"
+    for subpattern in subpatterns:
+        if any(rec_test(subpattern, lambda x: type(x) is str)):
+            if any(rec_test(subpattern,
+                            lambda x: type(x) is str and x in common_chars)):
+                subpatterns_with_common_chars.append(subpattern)
+            elif any(rec_test(subpattern,
+                              lambda x: type(x) is str and x in common_names)):
+                subpatterns_with_common_names.append(subpattern)
+                
+            else:
+                subpatterns_with_names.append(subpattern)
+
+    if subpatterns_with_names:
+        subpatterns = subpatterns_with_names
+    elif subpatterns_with_common_names:
+        subpatterns = subpatterns_with_common_names
+    elif subpatterns_with_common_chars:
+        subpatterns = subpatterns_with_common_chars
+    # of the remaining subpatterns pick out the longest one
+    return sorted(subpatterns, key=len, reverse=True)[0]
+
+def rec_test(sequence, test_func):
+    """Tests test_func on all items of sequence and items of included
+    sub-iterables"""
+    for x in sequence:
+        if type(x) is list or type(x) is tuple:
+            for y in rec_test(x, test_func):
+                yield y
+        else:
+            yield test_func(x)

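To see what the reduction produces for a concrete pattern, the new with_tree flag in patcomp (below) can be combined with reduce_tree; a small sketch, using an arbitrary pattern modelled on fix_apply's:

    from lib2to3.patcomp import PatternCompiler
    from lib2to3.btm_utils import reduce_tree

    pattern, pattern_tree = PatternCompiler().compile_pattern(
        "power< 'apply' trailer< '(' args=any ')' > >", with_tree=True)
    linear = reduce_tree(pattern_tree).get_linear_subpattern()
    # A leaf-to-root path of names/type numbers, e.g. ['apply', <'power' symbol>]
    print(linear)
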
Modified: sandbox/trunk/2to3/lib2to3/fixer_base.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixer_base.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixer_base.py	Sat Aug 14 23:10:19 2010
@@ -24,6 +24,7 @@
 
     PATTERN = None  # Most subclasses should override with a string literal
     pattern = None  # Compiled pattern, set by compile_pattern()
+    pattern_tree = None # Tree representation of the pattern
     options = None  # Options object passed to initializer
     filename = None # The filename (set by set_filename)
     logger = None   # A logger (set by set_filename)
@@ -35,6 +36,12 @@
                     # Lower numbers will be run first.
     _accept_type = None # [Advanced and not public] This tells RefactoringTool
                         # which node type to accept when there's not a pattern.
+                        
+    keep_line_order = False # For the bottom matcher: match with the
+                            # original line order
+    BM_compatible = False # Compatibility with the bottom matching
+                          # module; every fixer should set this
+                          # manually
 
     # Shortcut for access to Python grammar symbols
     syms = pygram.python_symbols
@@ -58,7 +65,8 @@
         self.{pattern,PATTERN} in .match().
         """
         if self.PATTERN is not None:
-            self.pattern = PatternCompiler().compile_pattern(self.PATTERN)
+            self.pattern, self.pattern_tree = \
+                PatternCompiler().compile_pattern(self.PATTERN, with_tree=True)
 
     def set_filename(self, filename):
         """Set the filename, and a logger derived from it.

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_apply.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_apply.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_apply.py	Sat Aug 14 23:10:19 2010
@@ -12,6 +12,7 @@
 from ..fixer_util import Call, Comma, parenthesize
 
 class FixApply(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
     power< 'apply'

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_basestring.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_basestring.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_basestring.py	Sat Aug 14 23:10:19 2010
@@ -6,6 +6,7 @@
 from ..fixer_util import Name
 
 class FixBasestring(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = "'basestring'"
 

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_buffer.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_buffer.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_buffer.py	Sat Aug 14 23:10:19 2010
@@ -9,6 +9,7 @@
 
 
 class FixBuffer(fixer_base.BaseFix):
+    BM_compatible = True
 
     explicit = True # The user must ask for this fixer
 

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_callable.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_callable.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_callable.py	Sat Aug 14 23:10:19 2010
@@ -11,6 +11,7 @@
 from lib2to3.fixer_util import Call, Name, String, Attr, touch_import
 
 class FixCallable(fixer_base.BaseFix):
+    BM_compatible = True
 
     order = "pre"
 

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_dict.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_dict.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_dict.py	Sat Aug 14 23:10:19 2010
@@ -40,6 +40,8 @@
 
 
 class FixDict(fixer_base.BaseFix):
+    BM_compatible = True
+
     PATTERN = """
     power< head=any+
          trailer< '.' method=('keys'|'items'|'values'|

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_except.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_except.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_except.py	Sat Aug 14 23:10:19 2010
@@ -34,6 +34,7 @@
                 yield (n, nodes[i+2])
 
 class FixExcept(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
     try_stmt< 'try' ':' (simple_stmt | suite)

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_exec.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_exec.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_exec.py	Sat Aug 14 23:10:19 2010
@@ -16,6 +16,7 @@
 
 
 class FixExec(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
     exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_execfile.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_execfile.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_execfile.py	Sat Aug 14 23:10:19 2010
@@ -13,6 +13,7 @@
 
 
 class FixExecfile(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
     power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_exitfunc.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_exitfunc.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_exitfunc.py	Sat Aug 14 23:10:19 2010
@@ -9,6 +9,8 @@
 
 
 class FixExitfunc(fixer_base.BaseFix):
+    keep_line_order = True
+    BM_compatible = True
 
     PATTERN = """
               (

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_filter.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_filter.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_filter.py	Sat Aug 14 23:10:19 2010
@@ -19,6 +19,7 @@
 from ..fixer_util import Name, Call, ListComp, in_special_context
 
 class FixFilter(fixer_base.ConditionalFix):
+    BM_compatible = True
 
     PATTERN = """
     filter_lambda=power<

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_funcattrs.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_funcattrs.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_funcattrs.py	Sat Aug 14 23:10:19 2010
@@ -7,6 +7,8 @@
 
 
 class FixFuncattrs(fixer_base.BaseFix):
+    BM_compatible = True
+
     PATTERN = """
     power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
                                   | 'func_name' | 'func_defaults' | 'func_code'

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_future.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_future.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_future.py	Sat Aug 14 23:10:19 2010
@@ -9,6 +9,8 @@
 from ..fixer_util import BlankLine
 
 class FixFuture(fixer_base.BaseFix):
+    BM_compatible = True
+
     PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
 
     # This should be run last -- some things check for the import

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_getcwdu.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_getcwdu.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_getcwdu.py	Sat Aug 14 23:10:19 2010
@@ -8,6 +8,7 @@
 from ..fixer_util import Name
 
 class FixGetcwdu(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
               power< 'os' trailer< dot='.' name='getcwdu' > any* >

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_has_key.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_has_key.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_has_key.py	Sat Aug 14 23:10:19 2010
@@ -37,6 +37,7 @@
 
 
 class FixHasKey(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
     anchor=power<

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_idioms.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_idioms.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_idioms.py	Sat Aug 14 23:10:19 2010
@@ -35,7 +35,6 @@
 TYPE = "power< 'type' trailer< '(' x=any ')' > >"
 
 class FixIdioms(fixer_base.BaseFix):
-
     explicit = True # The user must ask for this fixer
 
     PATTERN = r"""

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_import.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_import.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_import.py	Sat Aug 14 23:10:19 2010
@@ -36,6 +36,7 @@
 
 
 class FixImport(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
     import_from< 'from' imp=any 'import' ['('] any [')'] >

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_imports.py	Sat Aug 14 23:10:19 2010
@@ -84,6 +84,8 @@
 
 class FixImports(fixer_base.BaseFix):
 
+    BM_compatible = True
+    keep_line_order = True
     # This is overridden in fix_imports2.
     mapping = MAPPING
 

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_input.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_input.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_input.py	Sat Aug 14 23:10:19 2010
@@ -11,7 +11,7 @@
 
 
 class FixInput(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = """
               power< 'input' args=trailer< '(' [any] ')' > >
               """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_intern.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_intern.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_intern.py	Sat Aug 14 23:10:19 2010
@@ -12,7 +12,7 @@
 
 
 class FixIntern(fixer_base.BaseFix):
-
+    BM_compatible = True
     order = "pre"
 
     PATTERN = """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_isinstance.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_isinstance.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_isinstance.py	Sat Aug 14 23:10:19 2010
@@ -14,7 +14,7 @@
 
 
 class FixIsinstance(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = """
     power<
         'isinstance'

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_itertools.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_itertools.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_itertools.py	Sat Aug 14 23:10:19 2010
@@ -12,6 +12,7 @@
 from ..fixer_util import Name
 
 class FixItertools(fixer_base.BaseFix):
+    BM_compatible = True
     it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')"
     PATTERN = """
               power< it='itertools'

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_itertools_imports.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_itertools_imports.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_itertools_imports.py	Sat Aug 14 23:10:19 2010
@@ -6,6 +6,7 @@
 
 
 class FixItertoolsImports(fixer_base.BaseFix):
+    BM_compatible = True
     PATTERN = """
               import_from< 'from' 'itertools' 'import' imports=any >
               """ %(locals())

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_long.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_long.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_long.py	Sat Aug 14 23:10:19 2010
@@ -10,7 +10,7 @@
 
 
 class FixLong(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = "'long'"
 
     def transform(self, node, results):

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_map.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_map.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_map.py	Sat Aug 14 23:10:19 2010
@@ -26,6 +26,7 @@
 from ..pygram import python_symbols as syms
 
 class FixMap(fixer_base.ConditionalFix):
+    BM_compatible = True
 
     PATTERN = """
     map_none=power<

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_metaclass.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_metaclass.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_metaclass.py	Sat Aug 14 23:10:19 2010
@@ -143,6 +143,7 @@
 
 
 class FixMetaclass(fixer_base.BaseFix):
+    BM_compatible = True
 
     PATTERN = """
     classdef<any*>

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_methodattrs.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_methodattrs.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_methodattrs.py	Sat Aug 14 23:10:19 2010
@@ -13,6 +13,7 @@
     }
 
 class FixMethodattrs(fixer_base.BaseFix):
+    BM_compatible = True
     PATTERN = """
     power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
     """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_next.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_next.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_next.py	Sat Aug 14 23:10:19 2010
@@ -15,6 +15,7 @@
 
 
 class FixNext(fixer_base.BaseFix):
+    BM_compatible = True
     PATTERN = """
     power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
     |

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_nonzero.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_nonzero.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_nonzero.py	Sat Aug 14 23:10:19 2010
@@ -6,6 +6,7 @@
 from ..fixer_util import Name, syms
 
 class FixNonzero(fixer_base.BaseFix):
+    BM_compatible = True
     PATTERN = """
     classdef< 'class' any+ ':'
               suite< any*

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_operator.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_operator.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_operator.py	Sat Aug 14 23:10:19 2010
@@ -15,7 +15,7 @@
 
 
 class FixOperator(fixer_base.BaseFix):
-
+    BM_compatible = True
     order = "pre"
 
     methods = """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_paren.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_paren.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_paren.py	Sat Aug 14 23:10:19 2010
@@ -10,6 +10,8 @@
 
 # XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2]
 class FixParen(fixer_base.BaseFix):
+    BM_compatible = True
+
     PATTERN = """
         atom< ('[' | '(')
             (listmaker< any

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_print.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_print.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_print.py	Sat Aug 14 23:10:19 2010
@@ -28,6 +28,8 @@
 
 class FixPrint(fixer_base.BaseFix):
 
+    BM_compatible = True
+
     PATTERN = """
               simple_stmt< any* bare='print' any* > | print_stmt
               """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_raise.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_raise.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_raise.py	Sat Aug 14 23:10:19 2010
@@ -29,6 +29,7 @@
 
 class FixRaise(fixer_base.BaseFix):
 
+    BM_compatible = True
     PATTERN = """
     raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
     """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_raw_input.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_raw_input.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_raw_input.py	Sat Aug 14 23:10:19 2010
@@ -7,6 +7,7 @@
 
 class FixRawInput(fixer_base.BaseFix):
 
+    BM_compatible = True
     PATTERN = """
               power< name='raw_input' trailer< '(' [any] ')' > any* >
               """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_reduce.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_reduce.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_reduce.py	Sat Aug 14 23:10:19 2010
@@ -14,6 +14,7 @@
 
 class FixReduce(fixer_base.BaseFix):
 
+    BM_compatible = True
     order = "pre"
 
     PATTERN = """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_renames.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_renames.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_renames.py	Sat Aug 14 23:10:19 2010
@@ -40,6 +40,7 @@
 
 
 class FixRenames(fixer_base.BaseFix):
+    BM_compatible = True
     PATTERN = "|".join(build_pattern())
 
     order = "pre" # Pre-order tree traversal

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_repr.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_repr.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_repr.py	Sat Aug 14 23:10:19 2010
@@ -10,6 +10,7 @@
 
 class FixRepr(fixer_base.BaseFix):
 
+    BM_compatible = True
     PATTERN = """
               atom < '`' expr=any '`' >
               """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_set_literal.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_set_literal.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_set_literal.py	Sat Aug 14 23:10:19 2010
@@ -11,6 +11,7 @@
 
 class FixSetLiteral(fixer_base.BaseFix):
 
+    BM_compatible = True
     explicit = True
 
     PATTERN = """power< 'set' trailer< '('

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_standarderror.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_standarderror.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_standarderror.py	Sat Aug 14 23:10:19 2010
@@ -9,7 +9,7 @@
 
 
 class FixStandarderror(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = """
               'StandardError'
               """

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_sys_exc.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_sys_exc.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_sys_exc.py	Sat Aug 14 23:10:19 2010
@@ -14,6 +14,7 @@
 class FixSysExc(fixer_base.BaseFix):
     # This order matches the ordering of sys.exc_info().
     exc_info = [u"exc_type", u"exc_value", u"exc_traceback"]
+    BM_compatible = True
     PATTERN = """
               power< 'sys' trailer< dot='.' attribute=(%s) > >
               """ % '|'.join("'%s'" % e for e in exc_info)

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_throw.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_throw.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_throw.py	Sat Aug 14 23:10:19 2010
@@ -14,7 +14,7 @@
 from ..fixer_util import Name, Call, ArgList, Attr, is_tuple
 
 class FixThrow(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = """
     power< any trailer< '.' 'throw' >
            trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' >

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_tuple_params.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_tuple_params.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_tuple_params.py	Sat Aug 14 23:10:19 2010
@@ -29,6 +29,10 @@
            stmt.children[0].type == token.STRING
 
 class FixTupleParams(fixer_base.BaseFix):
+    run_order = 4 #use a lower order since lambda is part of other
+                  #patterns
+    BM_compatible = True
+    
     PATTERN = """
               funcdef< 'def' any parameters< '(' args=any ')' >
                        ['->' any] ':' suite=any+ >

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_types.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_types.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_types.py	Sat Aug 14 23:10:19 2010
@@ -52,7 +52,7 @@
 _pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]
 
 class FixTypes(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = '|'.join(_pats)
 
     def transform(self, node, results):

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_unicode.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_unicode.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_unicode.py	Sat Aug 14 23:10:19 2010
@@ -10,7 +10,7 @@
 _literal_re = re.compile(ur"[uU][rR]?[\'\"]")
 
 class FixUnicode(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = "STRING | 'unicode' | 'unichr'"
 
     def transform(self, node, results):

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_xrange.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_xrange.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_xrange.py	Sat Aug 14 23:10:19 2010
@@ -10,7 +10,7 @@
 
 
 class FixXrange(fixer_base.BaseFix):
-
+    BM_compatible = True
     PATTERN = """
               power<
                  (name='range'|name='xrange') trailer< '(' args=any ')' >

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_xreadlines.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_xreadlines.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_xreadlines.py	Sat Aug 14 23:10:19 2010
@@ -9,6 +9,7 @@
 
 
 class FixXreadlines(fixer_base.BaseFix):
+    BM_compatible = True
     PATTERN = """
     power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
     |

Modified: sandbox/trunk/2to3/lib2to3/fixes/fix_zip.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/fixes/fix_zip.py	(original)
+++ sandbox/trunk/2to3/lib2to3/fixes/fix_zip.py	Sat Aug 14 23:10:19 2010
@@ -13,11 +13,12 @@
 
 class FixZip(fixer_base.ConditionalFix):
 
+    BM_compatible = True
     PATTERN = """
     power< 'zip' args=trailer< '(' [any] ')' >
     >
     """
-
+    
     skip_on = "future_builtins.zip"
 
     def transform(self, node, results):

Modified: sandbox/trunk/2to3/lib2to3/patcomp.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/patcomp.py	(original)
+++ sandbox/trunk/2to3/lib2to3/patcomp.py	Sat Aug 14 23:10:19 2010
@@ -52,14 +52,17 @@
         self.pysyms = pygram.python_symbols
         self.driver = driver.Driver(self.grammar, convert=pattern_convert)
 
-    def compile_pattern(self, input, debug=False):
+    def compile_pattern(self, input, debug=False, with_tree=False):
         """Compiles a pattern string to a nested pytree.*Pattern object."""
         tokens = tokenize_wrapper(input)
         try:
             root = self.driver.parse_tokens(tokens, debug=debug)
-        except parse.ParseError, e:
+        except parse.ParseError as e:
             raise PatternSyntaxError(str(e))
-        return self.compile_node(root)
+        if with_tree:
+            return self.compile_node(root), root
+        else:
+            return self.compile_node(root)
 
     def compile_node(self, node):
         """Compiles a node, recursively.

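The new keyword argument is backwards compatible; a short sketch of both call forms:

    from lib2to3.patcomp import PatternCompiler

    pc = PatternCompiler()
    # Existing form: returns only the compiled pytree.*Pattern object.
    compiled = pc.compile_pattern("'basestring'")
    # New form: also returns the raw pattern parse tree used by the bottom matcher.
    compiled, tree = pc.compile_pattern("'basestring'", with_tree=True)
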
Modified: sandbox/trunk/2to3/lib2to3/pygram.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/pygram.py	(original)
+++ sandbox/trunk/2to3/lib2to3/pygram.py	Sat Aug 14 23:10:19 2010
@@ -13,6 +13,8 @@
 
 # The grammar file
 _GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt")
+_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
+                                     "PatternGrammar.txt")
 
 
 class Symbols(object):
@@ -33,3 +35,6 @@
 
 python_grammar_no_print_statement = python_grammar.copy()
 del python_grammar_no_print_statement.keywords["print"]
+
+pattern_grammar = driver.load_grammar(_PATTERN_GRAMMAR_FILE)
+pattern_symbols = Symbols(pattern_grammar)

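With the pattern grammar now loaded at import time, its symbols are available next to the Python ones; a quick check:

    from lib2to3 import pygram

    # Symbol numbers from PatternGrammar.txt, as consumed by btm_utils.
    print(pygram.pattern_symbols.Matcher)
    print(pygram.python_symbols.power)
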
Modified: sandbox/trunk/2to3/lib2to3/pytree.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/pytree.py	(original)
+++ sandbox/trunk/2to3/lib2to3/pytree.py	Sat Aug 14 23:10:19 2010
@@ -16,7 +16,6 @@
 import warnings
 from StringIO import StringIO
 
-
 HUGE = 0x7FFFFFFF  # maximum repeat count, default max
 
 _type_reprs = {}
@@ -30,7 +29,6 @@
             if type(val) == int: _type_reprs[val] = name
     return _type_reprs.setdefault(type_num, type_num)
 
-
 class Base(object):
 
     """
@@ -47,7 +45,8 @@
     parent = None  # Parent node pointer, or None
     children = ()  # Tuple of subnodes
     was_changed = False
-
+    was_checked = False
+    
     def __new__(cls, *args, **kwds):
         """Constructor that prevents Base from being instantiated."""
         assert cls is not Base, "Cannot instantiate Base"
@@ -213,6 +212,25 @@
                     return None
                 return self.parent.children[i-1]
 
+    def leaves(self):
+        for child in self.children:
+            for x in child.leaves():
+                yield x
+        if type(self) is Leaf:
+            yield self
+
+    def to_root(self):
+        yield self
+        if self.parent:
+            for p in self.parent.to_root():
+                yield p
+
+    def depth(self):
+        if self.parent is None:
+            return 0
+        return 1 + self.parent.depth()
+            
+
     def get_suffix(self):
         """
         Return the string immediately following the invocant node. This is
@@ -227,12 +245,14 @@
         def __str__(self):
             return unicode(self).encode("ascii")
 
-
 class Node(Base):
 
     """Concrete implementation for interior nodes."""
 
-    def __init__(self, type, children, context=None, prefix=None):
+    def __init__(self,type, children,
+                 context=None,
+                 prefix=None,
+                 fixers_applied=[]):
         """
         Initializer.
 
@@ -249,7 +269,8 @@
             ch.parent = self
         if prefix is not None:
             self.prefix = prefix
-
+        self.fixers_applied = fixers_applied[:]
+    
     def __repr__(self):
         """Return a canonical string representation."""
         return "%s(%s, %r)" % (self.__class__.__name__,
@@ -273,7 +294,8 @@
 
     def clone(self):
         """Return a cloned (deep) copy of self."""
-        return Node(self.type, [ch.clone() for ch in self.children])
+        return Node(self.type, [ch.clone() for ch in self.children],
+                    fixers_applied=self.fixers_applied)
 
     def post_order(self):
         """Return a post-order iterator for the tree."""
@@ -286,7 +308,7 @@
         """Return a pre-order iterator for the tree."""
         yield self
         for child in self.children:
-            for node in child.pre_order():
+            for node in child.post_order():
                 yield node
 
     def _prefix_getter(self):
@@ -341,7 +363,10 @@
     lineno = 0    # Line where this token starts in the input
     column = 0    # Column where this token tarts in the input
 
-    def __init__(self, type, value, context=None, prefix=None):
+    def __init__(self, type, value,
+                 context=None,
+                 prefix=None,
+                 fixers_applied=[]):
         """
         Initializer.
 
@@ -355,7 +380,8 @@
         self.value = value
         if prefix is not None:
             self._prefix = prefix
-
+        self.fixers_applied = fixers_applied[:]
+        
     def __repr__(self):
         """Return a canonical string representation."""
         return "%s(%r, %r)" % (self.__class__.__name__,
@@ -380,7 +406,8 @@
     def clone(self):
         """Return a cloned (deep) copy of self."""
         return Leaf(self.type, self.value,
-                    (self.prefix, (self.lineno, self.column)))
+                    (self.prefix, (self.lineno, self.column)),
+                    fixers_applied=self.fixers_applied)
 
     def post_order(self):
         """Return a post-order iterator for the tree."""

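The new Base.leaves(), to_root() and depth() helpers work on any parse tree; a minimal sketch (the source string is arbitrary):

    from lib2to3 import pytree, pygram
    from lib2to3.pgen2 import driver

    d = driver.Driver(pygram.python_grammar, convert=pytree.convert)
    tree = d.parse_string("x = 1\n")
    for leaf in tree.leaves():          # bottom-level generator added above
        print("%d %r" % (leaf.depth(), leaf.value))
    path = list(next(tree.leaves()).to_root())   # leaf, its ancestors, the root
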
Modified: sandbox/trunk/2to3/lib2to3/refactor.py
==============================================================================
--- sandbox/trunk/2to3/lib2to3/refactor.py	(original)
+++ sandbox/trunk/2to3/lib2to3/refactor.py	Sat Aug 14 23:10:19 2010
@@ -24,7 +24,10 @@
 
 # Local imports
 from .pgen2 import driver, tokenize, token
+from .fixer_util import find_root
 from . import pytree, pygram
+from . import btm_utils as bu
+from . import btm_matcher as bm
 
 
 def get_all_fix_names(fixer_pkg, remove_prefix=True):
@@ -174,7 +177,7 @@
 
     CLASS_PREFIX = "Fix" # The prefix for fixer classes
     FILE_PREFIX = "fix_" # The prefix for modules with a fixer within
-
+    
     def __init__(self, fixer_names, options=None, explicit=None):
         """Initializer.
 
@@ -201,10 +204,27 @@
                                     logger=self.logger)
         self.pre_order, self.post_order = self.get_fixers()
 
-        self.pre_order_heads = _get_headnode_dict(self.pre_order)
-        self.post_order_heads = _get_headnode_dict(self.post_order)
 
         self.files = []  # List of files that were or should be modified
+        
+        self.BM = bm.BottomMatcher()
+        self.bmi_pre_order = [] # Bottom Matcher incompatible fixers
+        self.bmi_post_order = []
+
+        for fixer in chain(self.post_order, self.pre_order):
+            if fixer.BM_compatible:
+                self.BM.add_fixer(fixer)
+                # remove fixers that will be handled by the bottom-up
+                # matcher
+            elif fixer in self.pre_order:
+                self.bmi_pre_order.append(fixer)
+            elif fixer in self.post_order:
+                self.bmi_post_order.append(fixer)
+
+        self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
+        self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)
+
+  
 
     def get_fixers(self):
         """Inspects the options to load the requested patterns and handlers.
@@ -268,6 +288,7 @@
 
     def refactor(self, items, write=False, doctests_only=False):
         """Refactor a list of files and directories."""
+
         for dir_or_file in items:
             if os.path.isdir(dir_or_file):
                 self.refactor_dir(dir_or_file, write, doctests_only)
@@ -299,7 +320,7 @@
         """
         try:
             f = open(filename, "rb")
-        except IOError, err:
+        except IOError as err:
             self.log_error("Can't open %s: %s", filename, err)
             return None, None
         try:
@@ -348,7 +369,7 @@
             self.driver.grammar = pygram.python_grammar_no_print_statement
         try:
             tree = self.driver.parse_string(data)
-        except Exception, err:
+        except Exception as err:
             self.log_error("Can't parse %s: %s: %s",
                            name, err.__class__.__name__, err)
             return
@@ -378,6 +399,10 @@
     def refactor_tree(self, tree, name):
         """Refactors a parse tree (modifying the tree in place).
 
+        For compatible patterns the bottom matcher module is
+        used. Otherwise the tree is traversed node-to-node for
+        matches.
+
         Args:
             tree: a pytree.Node instance representing the root of the tree
                   to be refactored.
@@ -386,12 +411,63 @@
         Returns:
             True if the tree was modified, False otherwise.
         """
+        
         for fixer in chain(self.pre_order, self.post_order):
             fixer.start_tree(tree, name)
 
-        self.traverse_by(self.pre_order_heads, tree.pre_order())
-        self.traverse_by(self.post_order_heads, tree.post_order())
-
+        #use traditional matching for the incompatible fixers
+        self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
+        self.traverse_by(self.bmi_post_order_heads, tree.post_order())
+
+        # obtain a set of candidate nodes
+        match_set = self.BM.run(tree.leaves())
+
+        while any(list(match_set.values())):
+            for fixer in self.BM.fixers:
+                if fixer in match_set.keys() and match_set[fixer]:
+                    #sort by depth; apply fixers from bottom(of the AST) to top
+                    match_set[fixer].sort(key=pytree.Base.depth, reverse=True)
+
+                    if fixer.keep_line_order:
+                        #some fixers(eg fix_imports) must be applied
+                        #with the original file's line order
+                        match_set[fixer].sort(key=pytree.Base.get_lineno)
+
+                    for node in list(match_set[fixer]):
+                        if node in match_set[fixer]:
+                            match_set[fixer].remove(node)
+
+                        try:
+                            find_root(node)
+                        except AssertionError:
+                            # this node has been cut off from a
+                            # previous transformation ; skip
+                            continue
+                        
+                        if fixer in node.fixers_applied:
+                            # do not apply the same fixer again
+                            continue
+
+                        results = fixer.match(node)
+                        
+                        if results:
+                            new = fixer.transform(node, results)
+                            if new is not None:
+                                node.replace(new)
+                                #new.fixers_applied.append(fixer)
+                                for node in new.post_order():
+                                    # do not apply the fixer again to
+                                    # this or any subnode
+                                    node.fixers_applied.append(fixer)
+
+                                # update the original match set for
+                                # the added code
+                                new_matches = self.BM.run(new.leaves())
+                                for fxr in new_matches.keys():
+                                    if not fxr in list(match_set.keys()):
+                                        match_set[fxr]=[]
+                                    match_set[fxr].extend(new_matches[fxr])
+                                    
         for fixer in chain(self.pre_order, self.post_order):
             fixer.finish_tree(tree, name)
         return tree.was_changed
@@ -448,12 +524,12 @@
         """
         try:
             f = _open_with_encoding(filename, "w", encoding=encoding)
-        except os.error, err:
+        except os.error as err:
             self.log_error("Can't create %s: %s", filename, err)
             return
         try:
             f.write(_to_system_newlines(new_text))
-        except os.error, err:
+        except os.error as err:
             self.log_error("Can't write %s: %s", filename, err)
         finally:
             f.close()
@@ -516,8 +592,8 @@
         """
         try:
             tree = self.parse_block(block, lineno, indent)
-        except Exception, err:
-            if self.logger.isEnabledFor(logging.DEBUG):
+        except Exception as err:
+            if self.log.isEnabledFor(logging.DEBUG):
                 for line in block:
                     self.log_debug("Source: %s", line.rstrip(u"\n"))
             self.log_error("Can't parse docstring in %s line %s: %s: %s",

