Commit b0871cac authored by Benjamin Peterson's avatar Benjamin Peterson

Merged revisions 85510 via svnmerge from

svn+ssh://pythondev@svn.python.org/python/branches/py3k

................
  r85510 | benjamin.peterson | 2010-10-14 18:00:04 -0500 (Thu, 14 Oct 2010) | 61 lines

  Merged revisions 83852-83853,83857,84042,84216,84274-84276,84375,85388,85478,85506-85508 via svnmerge from
  svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

  ........
    r83852 | benjamin.peterson | 2010-08-08 15:45:44 -0500 (Sun, 08 Aug 2010) | 1 line

    wrap with parens
  ........
    r83853 | benjamin.peterson | 2010-08-08 15:46:31 -0500 (Sun, 08 Aug 2010) | 1 line

    use parens
  ........
    r83857 | benjamin.peterson | 2010-08-08 15:59:49 -0500 (Sun, 08 Aug 2010) | 1 line

    things which use touch_import should be pre order
  ........
    r84042 | george.boutsioukis | 2010-08-14 16:10:19 -0500 (Sat, 14 Aug 2010) | 2 lines

    This revision incorporates into the 2to3 tool the new, faster, tree matching algorithm developed during a GSOC project. The algorithm resides in the two added modules, btm_matcher and btm_utils. New code has been added to drive the new matching process in refactor.py and a few minor changes were made in other modules. A BM_compatible flag (False by default) has been added in fixer_base and it is set to True in most of the current fixers.
  ........
    r84216 | benjamin.peterson | 2010-08-19 16:44:05 -0500 (Thu, 19 Aug 2010) | 1 line

    allow star_expr in testlist_gexp
  ........
    r84274 | benjamin.peterson | 2010-08-22 18:40:46 -0500 (Sun, 22 Aug 2010) | 1 line

    wrap long line
  ........
    r84275 | benjamin.peterson | 2010-08-22 18:42:22 -0500 (Sun, 22 Aug 2010) | 1 line

    cleanup
  ........
    r84276 | benjamin.peterson | 2010-08-22 18:51:01 -0500 (Sun, 22 Aug 2010) | 1 line

    when there's a None value and a traceback, don't call type with it #9661
  ........
    r84375 | george.boutsioukis | 2010-08-31 08:38:53 -0500 (Tue, 31 Aug 2010) | 3 lines

    Idiomatic code changes & stylistic issues fixed in the BottomMatcher module. Thanks to Benjamin Peterson for taking the time to review the code.
  ........
    r85388 | benjamin.peterson | 2010-10-12 17:27:44 -0500 (Tue, 12 Oct 2010) | 1 line

    fix urllib fixer with multiple as imports on a line #10069
  ........
    r85478 | benjamin.peterson | 2010-10-14 08:09:56 -0500 (Thu, 14 Oct 2010) | 1 line

    stop abusing docstrings
  ........
    r85506 | benjamin.peterson | 2010-10-14 17:45:19 -0500 (Thu, 14 Oct 2010) | 1 line

    kill sibling import
  ........
    r85507 | benjamin.peterson | 2010-10-14 17:54:15 -0500 (Thu, 14 Oct 2010) | 1 line

    remove trailing whitespace
  ........
    r85508 | benjamin.peterson | 2010-10-14 17:55:28 -0500 (Thu, 14 Oct 2010) | 1 line

    typo
  ........
................
parent ea5d827b
...@@ -128,7 +128,7 @@ atom: ('(' [yield_expr|testlist_gexp] ')' | ...@@ -128,7 +128,7 @@ atom: ('(' [yield_expr|testlist_gexp] ')' |
'`' testlist1 '`' | '`' testlist1 '`' |
NAME | NUMBER | STRING+ | '.' '.' '.') NAME | NUMBER | STRING+ | '.' '.' '.')
listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) listmaker: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
testlist_gexp: test ( comp_for | (',' (test|star_expr))* [','] ) testlist_gexp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] )
lambdef: 'lambda' [varargslist] ':' test lambdef: 'lambda' [varargslist] ':' test
trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
subscriptlist: subscript (',' subscript)* [','] subscriptlist: subscript (',' subscript)* [',']
......
"""A bottom-up tree matching algorithm implementation meant to speed
up 2to3's matching process. After the tree patterns are reduced to
their rarest linear path, a linear Aho-Corasick automaton is
created. The linear automaton traverses the linear paths from the
leaves to the root of the AST and returns a set of nodes for further
matching. This reduces significantly the number of candidate nodes."""
__author__ = "George Boutsioukis <gboutsioukis@gmail.com>"
import logging
import itertools
from collections import defaultdict
from . import pytree
from .btm_utils import reduce_tree
class BMNode(object):
    """A single state of the Aho-Corasick automaton used in matching.

    Each node records its outgoing transitions, the fixers to fire when
    the automaton reaches it, a unique id, and optional token content.
    """

    # Class-wide counter handing out a unique id to every node created.
    count = itertools.count()

    def __init__(self):
        self.content = ''
        self.id = next(BMNode.count)
        self.fixers = []
        self.transition_table = {}
class BottomMatcher(object):
    """The main matcher class. After instantiating, the patterns should
    be added using the add_fixer method."""

    def __init__(self):
        self.match = set()
        self.root = BMNode()
        self.nodes = [self.root]
        self.fixers = []
        self.logger = logging.getLogger("RefactoringTool")

    def add_fixer(self, fixer):
        """Reduces a fixer's pattern tree to a linear path and adds it
        to the matcher (a common Aho-Corasick automaton). The fixer is
        appended on the matching states and called when they are
        reached."""
        self.fixers.append(fixer)
        tree = reduce_tree(fixer.pattern_tree)
        linear = tree.get_linear_subpattern()
        match_nodes = self.add(linear, start=self.root)
        for match_node in match_nodes:
            match_node.fixers.append(fixer)

    def add(self, pattern, start):
        """Recursively adds a linear pattern to the AC automaton.

        Returns the list of automaton end nodes reached by the pattern;
        an empty pattern ends at `start` itself.
        """
        if not pattern:
            return [start]
        if isinstance(pattern[0], tuple):
            # Alternatives: add every alternative, then append the rest
            # of the pattern to each end node it produced.
            match_nodes = []
            for alternative in pattern[0]:
                end_nodes = self.add(alternative, start=start)
                for end in end_nodes:
                    match_nodes.extend(self.add(pattern[1:], end))
            return match_nodes
        else:
            # Single token: follow an existing transition or create one.
            if pattern[0] not in start.transition_table:
                # transition did not exist, create new
                next_node = BMNode()
                start.transition_table[pattern[0]] = next_node
            else:
                # transition exists already, follow
                next_node = start.transition_table[pattern[0]]

            if pattern[1:]:
                end_nodes = self.add(pattern[1:], start=next_node)
            else:
                end_nodes = [next_node]
            return end_nodes

    def run(self, leaves):
        """The main interface with the bottom matcher. The tree is
        traversed from the bottom using the constructed
        automaton. Nodes are only checked once as the tree is
        retraversed. When the automaton fails, we give it one more
        shot (in case the above tree matches as a whole with the
        rejected leaf), then we break for the next leaf. There is the
        special case of multiple arguments (see code comments) where we
        recheck the nodes.

        Args:
            leaves: The leaves of the AST tree to be matched.

        Returns:
            A dictionary of node matches with fixers as the keys.
        """
        current_ac_node = self.root
        results = defaultdict(list)
        for leaf in leaves:
            current_ast_node = leaf
            while current_ast_node:
                current_ast_node.was_checked = True
                for child in current_ast_node.children:
                    # Multiple statements on one line (';'): recheck later.
                    if isinstance(child, pytree.Leaf) and child.value == ";":
                        current_ast_node.was_checked = False
                        break
                if current_ast_node.type == 1:
                    # Token type 1 is NAME: match on the identifier text.
                    node_token = current_ast_node.value
                else:
                    node_token = current_ast_node.type

                if node_token in current_ac_node.transition_table:
                    # Token matches; results is a defaultdict(list), so
                    # appending auto-creates the per-fixer list.
                    current_ac_node = current_ac_node.transition_table[node_token]
                    for fixer in current_ac_node.fixers:
                        results[fixer].append(current_ast_node)
                else:
                    # Matching failed, reset automaton.
                    current_ac_node = self.root
                    if (current_ast_node.parent is not None
                        and current_ast_node.parent.was_checked):
                        # The rest of the tree upwards has been checked,
                        # move on to the next leaf.
                        break

                    # Recheck the rejected node once from the root.
                    if node_token in current_ac_node.transition_table:
                        current_ac_node = current_ac_node.transition_table[node_token]
                        for fixer in current_ac_node.fixers:
                            results[fixer].append(current_ast_node)

                current_ast_node = current_ast_node.parent
        return results

    def print_ac(self):
        """Prints a graphviz diagram of the BM automaton (for debugging)."""
        print("digraph g{")

        def print_node(node):
            for subnode_key in node.transition_table.keys():
                subnode = node.transition_table[subnode_key]
                print("%d -> %d [label=%s] //%s" %
                      (node.id, subnode.id, type_repr(subnode_key),
                       str(subnode.fixers)))
                if subnode_key == 1:
                    print(subnode.content)
                print_node(subnode)

        print_node(self.root)
        print("}")
# taken from pytree.py for debugging; only used by print_ac
_type_reprs = {}

def type_repr(type_num):
    """Return the grammar-symbol name for *type_num*, or the number itself.

    Lazily builds a reverse mapping from pygram.python_symbols on first
    use; unknown numbers are cached and returned as-is.
    """
    global _type_reprs
    if not _type_reprs:
        from .pygram import python_symbols
        # printing tokens is possible but not as useful
        # from .pgen2 import token // token.__dict__.items():
        for name, val in python_symbols.__dict__.items():
            # isinstance is the idiomatic type check (equivalent here:
            # the symbol table values are plain ints).
            if isinstance(val, int):
                _type_reprs[val] = name
    return _type_reprs.setdefault(type_num, type_num)
This diff is collapsed.
...@@ -24,6 +24,7 @@ class BaseFix(object): ...@@ -24,6 +24,7 @@ class BaseFix(object):
PATTERN = None # Most subclasses should override with a string literal PATTERN = None # Most subclasses should override with a string literal
pattern = None # Compiled pattern, set by compile_pattern() pattern = None # Compiled pattern, set by compile_pattern()
pattern_tree = None # Tree representation of the pattern
options = None # Options object passed to initializer options = None # Options object passed to initializer
filename = None # The filename (set by set_filename) filename = None # The filename (set by set_filename)
logger = None # A logger (set by set_filename) logger = None # A logger (set by set_filename)
...@@ -36,6 +37,12 @@ class BaseFix(object): ...@@ -36,6 +37,12 @@ class BaseFix(object):
_accept_type = None # [Advanced and not public] This tells RefactoringTool _accept_type = None # [Advanced and not public] This tells RefactoringTool
# which node type to accept when there's not a pattern. # which node type to accept when there's not a pattern.
keep_line_order = False # For the bottom matcher: match with the
# original line order
BM_compatible = False # Compatibility with the bottom matching
# module; every fixer should set this
# manually
# Shortcut for access to Python grammar symbols # Shortcut for access to Python grammar symbols
syms = pygram.python_symbols syms = pygram.python_symbols
...@@ -58,7 +65,9 @@ class BaseFix(object): ...@@ -58,7 +65,9 @@ class BaseFix(object):
self.{pattern,PATTERN} in .match(). self.{pattern,PATTERN} in .match().
""" """
if self.PATTERN is not None: if self.PATTERN is not None:
self.pattern = PatternCompiler().compile_pattern(self.PATTERN) PC = PatternCompiler()
self.pattern, self.pattern_tree = PC.compile_pattern(self.PATTERN,
with_tree=True)
def set_filename(self, filename): def set_filename(self, filename):
"""Set the filename, and a logger derived from it. """Set the filename, and a logger derived from it.
......
...@@ -295,8 +295,8 @@ def touch_import(package, name, node): ...@@ -295,8 +295,8 @@ def touch_import(package, name, node):
""" Works like `does_tree_import` but adds an import statement """ Works like `does_tree_import` but adds an import statement
if it was not imported. """ if it was not imported. """
def is_import_stmt(node): def is_import_stmt(node):
return node.type == syms.simple_stmt and node.children and \ return (node.type == syms.simple_stmt and node.children and
is_import(node.children[0]) is_import(node.children[0]))
root = find_root(node) root = find_root(node)
...@@ -319,8 +319,8 @@ def touch_import(package, name, node): ...@@ -319,8 +319,8 @@ def touch_import(package, name, node):
# if that also fails, we stick to the beginning of the file # if that also fails, we stick to the beginning of the file
if insert_pos == 0: if insert_pos == 0:
for idx, node in enumerate(root.children): for idx, node in enumerate(root.children):
if node.type == syms.simple_stmt and node.children and \ if (node.type == syms.simple_stmt and node.children and
node.children[0].type == token.STRING: node.children[0].type == token.STRING):
insert_pos = idx + 1 insert_pos = idx + 1
break break
......
...@@ -12,6 +12,7 @@ from .. import fixer_base ...@@ -12,6 +12,7 @@ from .. import fixer_base
from ..fixer_util import Call, Comma, parenthesize from ..fixer_util import Call, Comma, parenthesize
class FixApply(fixer_base.BaseFix): class FixApply(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< 'apply' power< 'apply'
......
...@@ -6,6 +6,7 @@ from .. import fixer_base ...@@ -6,6 +6,7 @@ from .. import fixer_base
from ..fixer_util import Name from ..fixer_util import Name
class FixBasestring(fixer_base.BaseFix): class FixBasestring(fixer_base.BaseFix):
BM_compatible = True
PATTERN = "'basestring'" PATTERN = "'basestring'"
......
...@@ -9,6 +9,7 @@ from ..fixer_util import Name ...@@ -9,6 +9,7 @@ from ..fixer_util import Name
class FixBuffer(fixer_base.BaseFix): class FixBuffer(fixer_base.BaseFix):
BM_compatible = True
explicit = True # The user must ask for this fixer explicit = True # The user must ask for this fixer
......
...@@ -11,6 +11,9 @@ from lib2to3 import fixer_base ...@@ -11,6 +11,9 @@ from lib2to3 import fixer_base
from lib2to3.fixer_util import Call, Name, String, Attr, touch_import from lib2to3.fixer_util import Call, Name, String, Attr, touch_import
class FixCallable(fixer_base.BaseFix): class FixCallable(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
# Ignore callable(*args) or use of keywords. # Ignore callable(*args) or use of keywords.
# Either could be a hint that the builtin callable() is not being used. # Either could be a hint that the builtin callable() is not being used.
......
...@@ -40,6 +40,8 @@ iter_exempt = fixer_util.consuming_calls | set(["iter"]) ...@@ -40,6 +40,8 @@ iter_exempt = fixer_util.consuming_calls | set(["iter"])
class FixDict(fixer_base.BaseFix): class FixDict(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< head=any+ power< head=any+
trailer< '.' method=('keys'|'items'|'values'| trailer< '.' method=('keys'|'items'|'values'|
......
...@@ -34,6 +34,7 @@ def find_excepts(nodes): ...@@ -34,6 +34,7 @@ def find_excepts(nodes):
yield (n, nodes[i+2]) yield (n, nodes[i+2])
class FixExcept(fixer_base.BaseFix): class FixExcept(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
try_stmt< 'try' ':' (simple_stmt | suite) try_stmt< 'try' ':' (simple_stmt | suite)
......
...@@ -16,6 +16,7 @@ from ..fixer_util import Comma, Name, Call ...@@ -16,6 +16,7 @@ from ..fixer_util import Comma, Name, Call
class FixExec(fixer_base.BaseFix): class FixExec(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
exec_stmt< 'exec' a=any 'in' b=any [',' c=any] > exec_stmt< 'exec' a=any 'in' b=any [',' c=any] >
......
...@@ -13,6 +13,7 @@ from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node, ...@@ -13,6 +13,7 @@ from ..fixer_util import (Comma, Name, Call, LParen, RParen, Dot, Node,
class FixExecfile(fixer_base.BaseFix): class FixExecfile(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > > power< 'execfile' trailer< '(' arglist< filename=any [',' globals=any [',' locals=any ] ] > ')' > >
......
...@@ -9,6 +9,8 @@ from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms ...@@ -9,6 +9,8 @@ from lib2to3.fixer_util import Name, Attr, Call, Comma, Newline, syms
class FixExitfunc(fixer_base.BaseFix): class FixExitfunc(fixer_base.BaseFix):
keep_line_order = True
BM_compatible = True
PATTERN = """ PATTERN = """
( (
......
...@@ -19,6 +19,7 @@ from .. import fixer_base ...@@ -19,6 +19,7 @@ from .. import fixer_base
from ..fixer_util import Name, Call, ListComp, in_special_context from ..fixer_util import Name, Call, ListComp, in_special_context
class FixFilter(fixer_base.ConditionalFix): class FixFilter(fixer_base.ConditionalFix):
BM_compatible = True
PATTERN = """ PATTERN = """
filter_lambda=power< filter_lambda=power<
......
...@@ -7,6 +7,8 @@ from ..fixer_util import Name ...@@ -7,6 +7,8 @@ from ..fixer_util import Name
class FixFuncattrs(fixer_base.BaseFix): class FixFuncattrs(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals' power< any+ trailer< '.' attr=('func_closure' | 'func_doc' | 'func_globals'
| 'func_name' | 'func_defaults' | 'func_code' | 'func_name' | 'func_defaults' | 'func_code'
......
...@@ -9,6 +9,8 @@ from .. import fixer_base ...@@ -9,6 +9,8 @@ from .. import fixer_base
from ..fixer_util import BlankLine from ..fixer_util import BlankLine
class FixFuture(fixer_base.BaseFix): class FixFuture(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """import_from< 'from' module_name="__future__" 'import' any >""" PATTERN = """import_from< 'from' module_name="__future__" 'import' any >"""
# This should be run last -- some things check for the import # This should be run last -- some things check for the import
......
...@@ -8,6 +8,7 @@ from .. import fixer_base ...@@ -8,6 +8,7 @@ from .. import fixer_base
from ..fixer_util import Name from ..fixer_util import Name
class FixGetcwdu(fixer_base.BaseFix): class FixGetcwdu(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< 'os' trailer< dot='.' name='getcwdu' > any* > power< 'os' trailer< dot='.' name='getcwdu' > any* >
......
...@@ -37,6 +37,7 @@ from ..fixer_util import Name, parenthesize ...@@ -37,6 +37,7 @@ from ..fixer_util import Name, parenthesize
class FixHasKey(fixer_base.BaseFix): class FixHasKey(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
anchor=power< anchor=power<
......
...@@ -35,7 +35,6 @@ CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)" ...@@ -35,7 +35,6 @@ CMP = "(n='!=' | '==' | 'is' | n=comp_op< 'is' 'not' >)"
TYPE = "power< 'type' trailer< '(' x=any ')' > >" TYPE = "power< 'type' trailer< '(' x=any ')' > >"
class FixIdioms(fixer_base.BaseFix): class FixIdioms(fixer_base.BaseFix):
explicit = True # The user must ask for this fixer explicit = True # The user must ask for this fixer
PATTERN = r""" PATTERN = r"""
......
...@@ -36,6 +36,7 @@ def traverse_imports(names): ...@@ -36,6 +36,7 @@ def traverse_imports(names):
class FixImport(fixer_base.BaseFix): class FixImport(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
import_from< 'from' imp=any 'import' ['('] any [')'] > import_from< 'from' imp=any 'import' ['('] any [')'] >
......
...@@ -84,6 +84,8 @@ def build_pattern(mapping=MAPPING): ...@@ -84,6 +84,8 @@ def build_pattern(mapping=MAPPING):
class FixImports(fixer_base.BaseFix): class FixImports(fixer_base.BaseFix):
BM_compatible = True
keep_line_order = True
# This is overridden in fix_imports2. # This is overridden in fix_imports2.
mapping = MAPPING mapping = MAPPING
......
...@@ -11,7 +11,7 @@ context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >") ...@@ -11,7 +11,7 @@ context = patcomp.compile_pattern("power< 'eval' trailer< '(' any ')' > >")
class FixInput(fixer_base.BaseFix): class FixInput(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< 'input' args=trailer< '(' [any] ')' > > power< 'input' args=trailer< '(' [any] ')' > >
""" """
......
...@@ -12,6 +12,8 @@ from ..fixer_util import Name, Attr, touch_import ...@@ -12,6 +12,8 @@ from ..fixer_util import Name, Attr, touch_import
class FixIntern(fixer_base.BaseFix): class FixIntern(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """ PATTERN = """
power< 'intern' power< 'intern'
......
...@@ -14,7 +14,7 @@ from ..fixer_util import token ...@@ -14,7 +14,7 @@ from ..fixer_util import token
class FixIsinstance(fixer_base.BaseFix): class FixIsinstance(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< power<
'isinstance' 'isinstance'
......
...@@ -12,6 +12,7 @@ from .. import fixer_base ...@@ -12,6 +12,7 @@ from .. import fixer_base
from ..fixer_util import Name from ..fixer_util import Name
class FixItertools(fixer_base.BaseFix): class FixItertools(fixer_base.BaseFix):
BM_compatible = True
it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')" it_funcs = "('imap'|'ifilter'|'izip'|'ifilterfalse')"
PATTERN = """ PATTERN = """
power< it='itertools' power< it='itertools'
......
...@@ -6,6 +6,7 @@ from lib2to3.fixer_util import BlankLine, syms, token ...@@ -6,6 +6,7 @@ from lib2to3.fixer_util import BlankLine, syms, token
class FixItertoolsImports(fixer_base.BaseFix): class FixItertoolsImports(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
import_from< 'from' 'itertools' 'import' imports=any > import_from< 'from' 'itertools' 'import' imports=any >
""" %(locals()) """ %(locals())
......
...@@ -10,7 +10,7 @@ from lib2to3.fixer_util import is_probably_builtin ...@@ -10,7 +10,7 @@ from lib2to3.fixer_util import is_probably_builtin
class FixLong(fixer_base.BaseFix): class FixLong(fixer_base.BaseFix):
BM_compatible = True
PATTERN = "'long'" PATTERN = "'long'"
def transform(self, node, results): def transform(self, node, results):
......
...@@ -26,6 +26,7 @@ from ..fixer_util import Name, Call, ListComp, in_special_context ...@@ -26,6 +26,7 @@ from ..fixer_util import Name, Call, ListComp, in_special_context
from ..pygram import python_symbols as syms from ..pygram import python_symbols as syms
class FixMap(fixer_base.ConditionalFix): class FixMap(fixer_base.ConditionalFix):
BM_compatible = True
PATTERN = """ PATTERN = """
map_none=power< map_none=power<
......
...@@ -143,6 +143,7 @@ def fixup_indent(suite): ...@@ -143,6 +143,7 @@ def fixup_indent(suite):
class FixMetaclass(fixer_base.BaseFix): class FixMetaclass(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
classdef<any*> classdef<any*>
......
...@@ -13,6 +13,7 @@ MAP = { ...@@ -13,6 +13,7 @@ MAP = {
} }
class FixMethodattrs(fixer_base.BaseFix): class FixMethodattrs(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* > power< any+ trailer< '.' attr=('im_func' | 'im_self' | 'im_class') > any* >
""" """
......
...@@ -15,6 +15,7 @@ bind_warning = "Calls to builtin next() possibly shadowed by global binding" ...@@ -15,6 +15,7 @@ bind_warning = "Calls to builtin next() possibly shadowed by global binding"
class FixNext(fixer_base.BaseFix): class FixNext(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > > power< base=any+ trailer< '.' attr='next' > trailer< '(' ')' > >
| |
......
...@@ -6,6 +6,7 @@ from .. import fixer_base ...@@ -6,6 +6,7 @@ from .. import fixer_base
from ..fixer_util import Name, syms from ..fixer_util import Name, syms
class FixNonzero(fixer_base.BaseFix): class FixNonzero(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
classdef< 'class' any+ ':' classdef< 'class' any+ ':'
suite< any* suite< any*
......
...@@ -16,7 +16,16 @@ from lib2to3 import fixer_base ...@@ -16,7 +16,16 @@ from lib2to3 import fixer_base
from lib2to3.fixer_util import Call, Name, String, touch_import from lib2to3.fixer_util import Call, Name, String, touch_import
def invocation(s):
def dec(f):
f.invocation = s
return f
return dec
class FixOperator(fixer_base.BaseFix): class FixOperator(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
methods = """ methods = """
method=('isCallable'|'sequenceIncludes' method=('isCallable'|'sequenceIncludes'
...@@ -36,34 +45,34 @@ class FixOperator(fixer_base.BaseFix): ...@@ -36,34 +45,34 @@ class FixOperator(fixer_base.BaseFix):
if method is not None: if method is not None:
return method(node, results) return method(node, results)
@invocation("operator.contains(%s)")
def _sequenceIncludes(self, node, results): def _sequenceIncludes(self, node, results):
"""operator.contains(%s)"""
return self._handle_rename(node, results, "contains") return self._handle_rename(node, results, "contains")
@invocation("hasattr(%s, '__call__')")
def _isCallable(self, node, results): def _isCallable(self, node, results):
"""hasattr(%s, '__call__')"""
obj = results["obj"] obj = results["obj"]
args = [obj.clone(), String(", "), String("'__call__'")] args = [obj.clone(), String(", "), String("'__call__'")]
return Call(Name("hasattr"), args, prefix=node.prefix) return Call(Name("hasattr"), args, prefix=node.prefix)
@invocation("operator.mul(%s)")
def _repeat(self, node, results): def _repeat(self, node, results):
"""operator.mul(%s)"""
return self._handle_rename(node, results, "mul") return self._handle_rename(node, results, "mul")
@invocation("operator.imul(%s)")
def _irepeat(self, node, results): def _irepeat(self, node, results):
"""operator.imul(%s)"""
return self._handle_rename(node, results, "imul") return self._handle_rename(node, results, "imul")
@invocation("isinstance(%s, collections.Sequence)")
def _isSequenceType(self, node, results): def _isSequenceType(self, node, results):
"""isinstance(%s, collections.Sequence)"""
return self._handle_type2abc(node, results, "collections", "Sequence") return self._handle_type2abc(node, results, "collections", "Sequence")
@invocation("isinstance(%s, collections.Mapping)")
def _isMappingType(self, node, results): def _isMappingType(self, node, results):
"""isinstance(%s, collections.Mapping)"""
return self._handle_type2abc(node, results, "collections", "Mapping") return self._handle_type2abc(node, results, "collections", "Mapping")
@invocation("isinstance(%s, numbers.Number)")
def _isNumberType(self, node, results): def _isNumberType(self, node, results):
"""isinstance(%s, numbers.Number)"""
return self._handle_type2abc(node, results, "numbers", "Number") return self._handle_type2abc(node, results, "numbers", "Number")
def _handle_rename(self, node, results, name): def _handle_rename(self, node, results, name):
...@@ -84,6 +93,6 @@ class FixOperator(fixer_base.BaseFix): ...@@ -84,6 +93,6 @@ class FixOperator(fixer_base.BaseFix):
return method return method
else: else:
sub = (str(results["obj"]),) sub = (str(results["obj"]),)
invocation_str = str(method.__doc__) % sub invocation_str = method.invocation % sub
self.warning(node, "You should use '%s' here." % invocation_str) self.warning(node, "You should use '%s' here." % invocation_str)
return None return None
...@@ -10,6 +10,8 @@ from ..fixer_util import LParen, RParen ...@@ -10,6 +10,8 @@ from ..fixer_util import LParen, RParen
# XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2] # XXX This doesn't support nested for loops like [x for x in 1, 2 for x in 1, 2]
class FixParen(fixer_base.BaseFix): class FixParen(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
atom< ('[' | '(') atom< ('[' | '(')
(listmaker< any (listmaker< any
......
...@@ -28,6 +28,8 @@ parend_expr = patcomp.compile_pattern( ...@@ -28,6 +28,8 @@ parend_expr = patcomp.compile_pattern(
class FixPrint(fixer_base.BaseFix): class FixPrint(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
simple_stmt< any* bare='print' any* > | print_stmt simple_stmt< any* bare='print' any* > | print_stmt
""" """
......
...@@ -4,6 +4,7 @@ raise -> raise ...@@ -4,6 +4,7 @@ raise -> raise
raise E -> raise E raise E -> raise E
raise E, V -> raise E(V) raise E, V -> raise E(V)
raise E, V, T -> raise E(V).with_traceback(T) raise E, V, T -> raise E(V).with_traceback(T)
raise E, None, T -> raise E.with_traceback(T)
raise (((E, E'), E''), E'''), V -> raise E(V) raise (((E, E'), E''), E'''), V -> raise E(V)
raise "foo", V, T -> warns about string exceptions raise "foo", V, T -> warns about string exceptions
...@@ -29,6 +30,7 @@ from ..fixer_util import Name, Call, Attr, ArgList, is_tuple ...@@ -29,6 +30,7 @@ from ..fixer_util import Name, Call, Attr, ArgList, is_tuple
class FixRaise(fixer_base.BaseFix): class FixRaise(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] > raise_stmt< 'raise' exc=any [',' val=any [',' tb=any]] >
""" """
...@@ -37,8 +39,9 @@ class FixRaise(fixer_base.BaseFix): ...@@ -37,8 +39,9 @@ class FixRaise(fixer_base.BaseFix):
syms = self.syms syms = self.syms
exc = results["exc"].clone() exc = results["exc"].clone()
if exc.type is token.STRING: if exc.type == token.STRING:
self.cannot_convert(node, "Python 3 does not support string exceptions") msg = "Python 3 does not support string exceptions"
self.cannot_convert(node, msg)
return return
# Python 2 supports # Python 2 supports
...@@ -71,7 +74,12 @@ class FixRaise(fixer_base.BaseFix): ...@@ -71,7 +74,12 @@ class FixRaise(fixer_base.BaseFix):
tb = results["tb"].clone() tb = results["tb"].clone()
tb.prefix = "" tb.prefix = ""
e = Call(exc, args) e = exc
# If there's a traceback and None is passed as the value, then don't
# add a call, since the user probably just wants to add a
# traceback. See issue #9661.
if val.type != token.NAME or val.value != "None":
e = Call(exc, args)
with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])] with_tb = Attr(e, Name('with_traceback')) + [ArgList([tb])]
new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb) new = pytree.Node(syms.simple_stmt, [Name("raise")] + with_tb)
new.prefix = node.prefix new.prefix = node.prefix
......
...@@ -7,6 +7,7 @@ from ..fixer_util import Name ...@@ -7,6 +7,7 @@ from ..fixer_util import Name
class FixRawInput(fixer_base.BaseFix): class FixRawInput(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< name='raw_input' trailer< '(' [any] ')' > any* > power< name='raw_input' trailer< '(' [any] ')' > any* >
""" """
......
...@@ -14,6 +14,9 @@ from lib2to3.fixer_util import touch_import ...@@ -14,6 +14,9 @@ from lib2to3.fixer_util import touch_import
class FixReduce(fixer_base.BaseFix): class FixReduce(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """ PATTERN = """
power< 'reduce' power< 'reduce'
trailer< '(' trailer< '('
......
...@@ -40,6 +40,7 @@ def build_pattern(): ...@@ -40,6 +40,7 @@ def build_pattern():
class FixRenames(fixer_base.BaseFix): class FixRenames(fixer_base.BaseFix):
BM_compatible = True
PATTERN = "|".join(build_pattern()) PATTERN = "|".join(build_pattern())
order = "pre" # Pre-order tree traversal order = "pre" # Pre-order tree traversal
......
...@@ -10,6 +10,7 @@ from ..fixer_util import Call, Name, parenthesize ...@@ -10,6 +10,7 @@ from ..fixer_util import Call, Name, parenthesize
class FixRepr(fixer_base.BaseFix): class FixRepr(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
atom < '`' expr=any '`' > atom < '`' expr=any '`' >
""" """
......
...@@ -11,6 +11,7 @@ from lib2to3.fixer_util import token, syms ...@@ -11,6 +11,7 @@ from lib2to3.fixer_util import token, syms
class FixSetLiteral(fixer_base.BaseFix): class FixSetLiteral(fixer_base.BaseFix):
BM_compatible = True
explicit = True explicit = True
PATTERN = """power< 'set' trailer< '(' PATTERN = """power< 'set' trailer< '('
......
...@@ -9,7 +9,7 @@ from ..fixer_util import Name ...@@ -9,7 +9,7 @@ from ..fixer_util import Name
class FixStandarderror(fixer_base.BaseFix): class FixStandarderror(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
'StandardError' 'StandardError'
""" """
......
...@@ -14,6 +14,7 @@ from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms ...@@ -14,6 +14,7 @@ from ..fixer_util import Attr, Call, Name, Number, Subscript, Node, syms
class FixSysExc(fixer_base.BaseFix): class FixSysExc(fixer_base.BaseFix):
# This order matches the ordering of sys.exc_info(). # This order matches the ordering of sys.exc_info().
exc_info = ["exc_type", "exc_value", "exc_traceback"] exc_info = ["exc_type", "exc_value", "exc_traceback"]
BM_compatible = True
PATTERN = """ PATTERN = """
power< 'sys' trailer< dot='.' attribute=(%s) > > power< 'sys' trailer< dot='.' attribute=(%s) > >
""" % '|'.join("'%s'" % e for e in exc_info) """ % '|'.join("'%s'" % e for e in exc_info)
......
...@@ -14,7 +14,7 @@ from .. import fixer_base ...@@ -14,7 +14,7 @@ from .. import fixer_base
from ..fixer_util import Name, Call, ArgList, Attr, is_tuple from ..fixer_util import Name, Call, ArgList, Attr, is_tuple
class FixThrow(fixer_base.BaseFix): class FixThrow(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< any trailer< '.' 'throw' > power< any trailer< '.' 'throw' >
trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' > trailer< '(' args=arglist< exc=any ',' val=any [',' tb=any] > ')' >
......
...@@ -29,6 +29,10 @@ def is_docstring(stmt): ...@@ -29,6 +29,10 @@ def is_docstring(stmt):
stmt.children[0].type == token.STRING stmt.children[0].type == token.STRING
class FixTupleParams(fixer_base.BaseFix): class FixTupleParams(fixer_base.BaseFix):
run_order = 4 #use a lower order since lambda is part of other
#patterns
BM_compatible = True
PATTERN = """ PATTERN = """
funcdef< 'def' any parameters< '(' args=any ')' > funcdef< 'def' any parameters< '(' args=any ')' >
['->' any] ':' suite=any+ > ['->' any] ':' suite=any+ >
......
...@@ -52,7 +52,7 @@ _TYPE_MAPPING = { ...@@ -52,7 +52,7 @@ _TYPE_MAPPING = {
_pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING] _pats = ["power< 'types' trailer< '.' name='%s' > >" % t for t in _TYPE_MAPPING]
class FixTypes(fixer_base.BaseFix): class FixTypes(fixer_base.BaseFix):
BM_compatible = True
PATTERN = '|'.join(_pats) PATTERN = '|'.join(_pats)
def transform(self, node, results): def transform(self, node, results):
......
...@@ -10,7 +10,7 @@ _mapping = {"unichr" : "chr", "unicode" : "str"} ...@@ -10,7 +10,7 @@ _mapping = {"unichr" : "chr", "unicode" : "str"}
_literal_re = re.compile(r"[uU][rR]?[\'\"]") _literal_re = re.compile(r"[uU][rR]?[\'\"]")
class FixUnicode(fixer_base.BaseFix): class FixUnicode(fixer_base.BaseFix):
BM_compatible = True
PATTERN = "STRING | 'unicode' | 'unichr'" PATTERN = "STRING | 'unicode' | 'unichr'"
def transform(self, node, results): def transform(self, node, results):
......
...@@ -8,7 +8,7 @@ ...@@ -8,7 +8,7 @@
from lib2to3.fixes.fix_imports import alternates, FixImports from lib2to3.fixes.fix_imports import alternates, FixImports
from lib2to3 import fixer_base from lib2to3 import fixer_base
from lib2to3.fixer_util import (Name, Comma, FromImport, Newline, from lib2to3.fixer_util import (Name, Comma, FromImport, Newline,
find_indentation) find_indentation, Node, syms)
MAPPING = {"urllib": [ MAPPING = {"urllib": [
("urllib.request", ("urllib.request",
...@@ -121,26 +121,37 @@ class FixUrllib(FixImports): ...@@ -121,26 +121,37 @@ class FixUrllib(FixImports):
mod_dict = {} mod_dict = {}
members = results["members"] members = results["members"]
for member in members: for member in members:
member = member.value
# we only care about the actual members # we only care about the actual members
if member != ",": if member.type == syms.import_as_name:
as_name = member.children[2].value
member_name = member.children[0].value
else:
member_name = member.value
as_name = None
if member_name != ",":
for change in MAPPING[mod_member.value]: for change in MAPPING[mod_member.value]:
if member in change[1]: if member_name in change[1]:
if change[0] in mod_dict: if change[0] not in mod_dict:
mod_dict[change[0]].append(member)
else:
mod_dict[change[0]] = [member]
modules.append(change[0]) modules.append(change[0])
mod_dict.setdefault(change[0], []).append(member)
new_nodes = [] new_nodes = []
indentation = find_indentation(node) indentation = find_indentation(node)
first = True first = True
def handle_name(name, prefix):
if name.type == syms.import_as_name:
kids = [Name(name.children[0].value, prefix=prefix),
name.children[1].clone(),
name.children[2].clone()]
return [Node(syms.import_as_name, kids)]
return [Name(name.value, prefix=prefix)]
for module in modules: for module in modules:
elts = mod_dict[module] elts = mod_dict[module]
names = [] names = []
for elt in elts[:-1]: for elt in elts[:-1]:
names.extend([Name(elt, prefix=pref), Comma()]) names.extend(handle_name(elt, pref))
names.append(Name(elts[-1], prefix=pref)) names.append(Comma())
names.extend(handle_name(elts[-1], pref))
new = FromImport(module, names) new = FromImport(module, names)
if not first or node.parent.prefix.endswith(indentation): if not first or node.parent.prefix.endswith(indentation):
new.prefix = indentation new.prefix = indentation
......
...@@ -10,7 +10,7 @@ from .. import patcomp ...@@ -10,7 +10,7 @@ from .. import patcomp
class FixXrange(fixer_base.BaseFix): class FixXrange(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< power<
(name='range'|name='xrange') trailer< '(' args=any ')' > (name='range'|name='xrange') trailer< '(' args=any ')' >
......
...@@ -9,6 +9,7 @@ from ..fixer_util import Name ...@@ -9,6 +9,7 @@ from ..fixer_util import Name
class FixXreadlines(fixer_base.BaseFix): class FixXreadlines(fixer_base.BaseFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > > power< call=any+ trailer< '.' 'xreadlines' > trailer< '(' ')' > >
| |
......
...@@ -13,6 +13,7 @@ from ..fixer_util import Name, Call, in_special_context ...@@ -13,6 +13,7 @@ from ..fixer_util import Name, Call, in_special_context
class FixZip(fixer_base.ConditionalFix): class FixZip(fixer_base.ConditionalFix):
BM_compatible = True
PATTERN = """ PATTERN = """
power< 'zip' args=trailer< '(' [any] ')' > power< 'zip' args=trailer< '(' [any] ')' >
> >
......
...@@ -52,14 +52,17 @@ class PatternCompiler(object): ...@@ -52,14 +52,17 @@ class PatternCompiler(object):
self.pysyms = pygram.python_symbols self.pysyms = pygram.python_symbols
self.driver = driver.Driver(self.grammar, convert=pattern_convert) self.driver = driver.Driver(self.grammar, convert=pattern_convert)
def compile_pattern(self, input, debug=False): def compile_pattern(self, input, debug=False, with_tree=False):
"""Compiles a pattern string to a nested pytree.*Pattern object.""" """Compiles a pattern string to a nested pytree.*Pattern object."""
tokens = tokenize_wrapper(input) tokens = tokenize_wrapper(input)
try: try:
root = self.driver.parse_tokens(tokens, debug=debug) root = self.driver.parse_tokens(tokens, debug=debug)
except parse.ParseError as e: except parse.ParseError as e:
raise PatternSyntaxError(str(e)) raise PatternSyntaxError(str(e))
return self.compile_node(root) if with_tree:
return self.compile_node(root), root
else:
return self.compile_node(root)
def compile_node(self, node): def compile_node(self, node):
"""Compiles a node, recursively. """Compiles a node, recursively.
......
...@@ -13,6 +13,8 @@ from . import pytree ...@@ -13,6 +13,8 @@ from . import pytree
# The grammar file # The grammar file
_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt") _GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), "Grammar.txt")
_PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
"PatternGrammar.txt")
class Symbols(object): class Symbols(object):
...@@ -33,3 +35,6 @@ python_symbols = Symbols(python_grammar) ...@@ -33,3 +35,6 @@ python_symbols = Symbols(python_grammar)
python_grammar_no_print_statement = python_grammar.copy() python_grammar_no_print_statement = python_grammar.copy()
del python_grammar_no_print_statement.keywords["print"] del python_grammar_no_print_statement.keywords["print"]
pattern_grammar = driver.load_grammar(_PATTERN_GRAMMAR_FILE)
pattern_symbols = Symbols(pattern_grammar)
...@@ -16,7 +16,6 @@ import sys ...@@ -16,7 +16,6 @@ import sys
import warnings import warnings
from io import StringIO from io import StringIO
HUGE = 0x7FFFFFFF # maximum repeat count, default max HUGE = 0x7FFFFFFF # maximum repeat count, default max
_type_reprs = {} _type_reprs = {}
...@@ -30,7 +29,6 @@ def type_repr(type_num): ...@@ -30,7 +29,6 @@ def type_repr(type_num):
if type(val) == int: _type_reprs[val] = name if type(val) == int: _type_reprs[val] = name
return _type_reprs.setdefault(type_num, type_num) return _type_reprs.setdefault(type_num, type_num)
class Base(object): class Base(object):
""" """
...@@ -47,6 +45,7 @@ class Base(object): ...@@ -47,6 +45,7 @@ class Base(object):
parent = None # Parent node pointer, or None parent = None # Parent node pointer, or None
children = () # Tuple of subnodes children = () # Tuple of subnodes
was_changed = False was_changed = False
was_checked = False
def __new__(cls, *args, **kwds): def __new__(cls, *args, **kwds):
"""Constructor that prevents Base from being instantiated.""" """Constructor that prevents Base from being instantiated."""
...@@ -213,6 +212,16 @@ class Base(object): ...@@ -213,6 +212,16 @@ class Base(object):
return None return None
return self.parent.children[i-1] return self.parent.children[i-1]
def leaves(self):
for child in self.children:
for x in child.leaves():
yield x
def depth(self):
if self.parent is None:
return 0
return 1 + self.parent.depth()
def get_suffix(self): def get_suffix(self):
""" """
Return the string immediately following the invocant node. This is Return the string immediately following the invocant node. This is
...@@ -227,12 +236,14 @@ class Base(object): ...@@ -227,12 +236,14 @@ class Base(object):
def __str__(self): def __str__(self):
return str(self).encode("ascii") return str(self).encode("ascii")
class Node(Base): class Node(Base):
"""Concrete implementation for interior nodes.""" """Concrete implementation for interior nodes."""
def __init__(self, type, children, context=None, prefix=None): def __init__(self,type, children,
context=None,
prefix=None,
fixers_applied=None):
""" """
Initializer. Initializer.
...@@ -249,6 +260,10 @@ class Node(Base): ...@@ -249,6 +260,10 @@ class Node(Base):
ch.parent = self ch.parent = self
if prefix is not None: if prefix is not None:
self.prefix = prefix self.prefix = prefix
if fixers_applied:
self.fixers_applied = fixers_applied[:]
else:
self.fixers_applied = None
def __repr__(self): def __repr__(self):
"""Return a canonical string representation.""" """Return a canonical string representation."""
...@@ -273,7 +288,8 @@ class Node(Base): ...@@ -273,7 +288,8 @@ class Node(Base):
def clone(self): def clone(self):
"""Return a cloned (deep) copy of self.""" """Return a cloned (deep) copy of self."""
return Node(self.type, [ch.clone() for ch in self.children]) return Node(self.type, [ch.clone() for ch in self.children],
fixers_applied=self.fixers_applied)
def post_order(self): def post_order(self):
"""Return a post-order iterator for the tree.""" """Return a post-order iterator for the tree."""
...@@ -341,7 +357,10 @@ class Leaf(Base): ...@@ -341,7 +357,10 @@ class Leaf(Base):
lineno = 0 # Line where this token starts in the input lineno = 0 # Line where this token starts in the input
column = 0 # Column where this token tarts in the input column = 0 # Column where this token tarts in the input
def __init__(self, type, value, context=None, prefix=None): def __init__(self, type, value,
context=None,
prefix=None,
fixers_applied=[]):
""" """
Initializer. Initializer.
...@@ -355,6 +374,7 @@ class Leaf(Base): ...@@ -355,6 +374,7 @@ class Leaf(Base):
self.value = value self.value = value
if prefix is not None: if prefix is not None:
self._prefix = prefix self._prefix = prefix
self.fixers_applied = fixers_applied[:]
def __repr__(self): def __repr__(self):
"""Return a canonical string representation.""" """Return a canonical string representation."""
...@@ -380,7 +400,11 @@ class Leaf(Base): ...@@ -380,7 +400,11 @@ class Leaf(Base):
def clone(self): def clone(self):
"""Return a cloned (deep) copy of self.""" """Return a cloned (deep) copy of self."""
return Leaf(self.type, self.value, return Leaf(self.type, self.value,
(self.prefix, (self.lineno, self.column))) (self.prefix, (self.lineno, self.column)),
fixers_applied=self.fixers_applied)
def leaves(self):
yield self
def post_order(self): def post_order(self):
"""Return a post-order iterator for the tree.""" """Return a post-order iterator for the tree."""
......
...@@ -24,7 +24,10 @@ from itertools import chain ...@@ -24,7 +24,10 @@ from itertools import chain
# Local imports # Local imports
from .pgen2 import driver, tokenize, token from .pgen2 import driver, tokenize, token
from .fixer_util import find_root
from . import pytree, pygram from . import pytree, pygram
from . import btm_utils as bu
from . import btm_matcher as bm
def get_all_fix_names(fixer_pkg, remove_prefix=True): def get_all_fix_names(fixer_pkg, remove_prefix=True):
...@@ -201,11 +204,28 @@ class RefactoringTool(object): ...@@ -201,11 +204,28 @@ class RefactoringTool(object):
logger=self.logger) logger=self.logger)
self.pre_order, self.post_order = self.get_fixers() self.pre_order, self.post_order = self.get_fixers()
self.pre_order_heads = _get_headnode_dict(self.pre_order)
self.post_order_heads = _get_headnode_dict(self.post_order)
self.files = [] # List of files that were or should be modified self.files = [] # List of files that were or should be modified
self.BM = bm.BottomMatcher()
self.bmi_pre_order = [] # Bottom Matcher incompatible fixers
self.bmi_post_order = []
for fixer in chain(self.post_order, self.pre_order):
if fixer.BM_compatible:
self.BM.add_fixer(fixer)
# remove fixers that will be handled by the bottom-up
# matcher
elif fixer in self.pre_order:
self.bmi_pre_order.append(fixer)
elif fixer in self.post_order:
self.bmi_post_order.append(fixer)
self.bmi_pre_order_heads = _get_headnode_dict(self.bmi_pre_order)
self.bmi_post_order_heads = _get_headnode_dict(self.bmi_post_order)
def get_fixers(self): def get_fixers(self):
"""Inspects the options to load the requested patterns and handlers. """Inspects the options to load the requested patterns and handlers.
...@@ -268,6 +288,7 @@ class RefactoringTool(object): ...@@ -268,6 +288,7 @@ class RefactoringTool(object):
def refactor(self, items, write=False, doctests_only=False): def refactor(self, items, write=False, doctests_only=False):
"""Refactor a list of files and directories.""" """Refactor a list of files and directories."""
for dir_or_file in items: for dir_or_file in items:
if os.path.isdir(dir_or_file): if os.path.isdir(dir_or_file):
self.refactor_dir(dir_or_file, write, doctests_only) self.refactor_dir(dir_or_file, write, doctests_only)
...@@ -378,6 +399,10 @@ class RefactoringTool(object): ...@@ -378,6 +399,10 @@ class RefactoringTool(object):
def refactor_tree(self, tree, name): def refactor_tree(self, tree, name):
"""Refactors a parse tree (modifying the tree in place). """Refactors a parse tree (modifying the tree in place).
For compatible patterns the bottom matcher module is
used. Otherwise the tree is traversed node-to-node for
matches.
Args: Args:
tree: a pytree.Node instance representing the root of the tree tree: a pytree.Node instance representing the root of the tree
to be refactored. to be refactored.
...@@ -386,11 +411,65 @@ class RefactoringTool(object): ...@@ -386,11 +411,65 @@ class RefactoringTool(object):
Returns: Returns:
True if the tree was modified, False otherwise. True if the tree was modified, False otherwise.
""" """
for fixer in chain(self.pre_order, self.post_order): for fixer in chain(self.pre_order, self.post_order):
fixer.start_tree(tree, name) fixer.start_tree(tree, name)
self.traverse_by(self.pre_order_heads, tree.pre_order()) #use traditional matching for the incompatible fixers
self.traverse_by(self.post_order_heads, tree.post_order()) self.traverse_by(self.bmi_pre_order_heads, tree.pre_order())
self.traverse_by(self.bmi_post_order_heads, tree.post_order())
# obtain a set of candidate nodes
match_set = self.BM.run(tree.leaves())
while any(match_set.values()):
for fixer in self.BM.fixers:
if fixer in match_set and match_set[fixer]:
#sort by depth; apply fixers from bottom(of the AST) to top
match_set[fixer].sort(key=pytree.Base.depth, reverse=True)
if fixer.keep_line_order:
#some fixers(eg fix_imports) must be applied
#with the original file's line order
match_set[fixer].sort(key=pytree.Base.get_lineno)
for node in list(match_set[fixer]):
if node in match_set[fixer]:
match_set[fixer].remove(node)
try:
find_root(node)
except AssertionError:
# this node has been cut off from a
# previous transformation ; skip
continue
if node.fixers_applied and fixer in node.fixers_applied:
# do not apply the same fixer again
continue
results = fixer.match(node)
if results:
new = fixer.transform(node, results)
if new is not None:
node.replace(new)
#new.fixers_applied.append(fixer)
for node in new.post_order():
# do not apply the fixer again to
# this or any subnode
if not node.fixers_applied:
node.fixers_applied = []
node.fixers_applied.append(fixer)
# update the original match set for
# the added code
new_matches = self.BM.run(new.leaves())
for fxr in new_matches:
if not fxr in match_set:
match_set[fxr]=[]
match_set[fxr].extend(new_matches[fxr])
for fixer in chain(self.pre_order, self.post_order): for fixer in chain(self.pre_order, self.post_order):
fixer.finish_tree(tree, name) fixer.finish_tree(tree, name)
......
# coding: utf-8 # coding: utf-8
print "BOM BOOM!" print "BOM BOOM!"
...@@ -868,6 +868,11 @@ class Test_raise(FixerTestCase): ...@@ -868,6 +868,11 @@ class Test_raise(FixerTestCase):
raise Exception(5).with_traceback(6) # foo""" raise Exception(5).with_traceback(6) # foo"""
self.check(b, a) self.check(b, a)
def test_None_value(self):
b = """raise Exception(5), None, tb"""
a = """raise Exception(5).with_traceback(tb)"""
self.check(b, a)
def test_tuple_value(self): def test_tuple_value(self):
b = """raise Exception, (5, 6, 7)""" b = """raise Exception, (5, 6, 7)"""
a = """raise Exception(5, 6, 7)""" a = """raise Exception(5, 6, 7)"""
...@@ -1812,6 +1817,9 @@ class Test_urllib(FixerTestCase): ...@@ -1812,6 +1817,9 @@ class Test_urllib(FixerTestCase):
b = "from %s import %s as foo_bar" % (old, member) b = "from %s import %s as foo_bar" % (old, member)
a = "from %s import %s as foo_bar" % (new, member) a = "from %s import %s as foo_bar" % (new, member)
self.check(b, a) self.check(b, a)
b = "from %s import %s as blah, %s" % (old, member, member)
a = "from %s import %s as blah, %s" % (new, member, member)
self.check(b, a)
def test_star(self): def test_star(self):
for old in self.modules: for old in self.modules:
......
...@@ -178,6 +178,27 @@ class TestNodes(support.TestCase): ...@@ -178,6 +178,27 @@ class TestNodes(support.TestCase):
self.assertEqual(str(n1), "foo**bar") self.assertEqual(str(n1), "foo**bar")
self.assertTrue(isinstance(n1.children, list)) self.assertTrue(isinstance(n1.children, list))
def test_leaves(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
l3 = pytree.Leaf(100, "fooey")
n2 = pytree.Node(1000, [l1, l2])
n3 = pytree.Node(1000, [l3])
n1 = pytree.Node(1000, [n2, n3])
self.assertEqual(list(n1.leaves()), [l1, l2, l3])
def test_depth(self):
l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar")
n2 = pytree.Node(1000, [l1, l2])
n3 = pytree.Node(1000, [])
n1 = pytree.Node(1000, [n2, n3])
self.assertEqual(l1.depth(), 2)
self.assertEqual(n3.depth(), 1)
self.assertEqual(n1.depth(), 0)
def test_post_order(self): def test_post_order(self):
l1 = pytree.Leaf(100, "foo") l1 = pytree.Leaf(100, "foo")
l2 = pytree.Leaf(100, "bar") l2 = pytree.Leaf(100, "bar")
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment