Commit 8059e1e2 authored by Benjamin Peterson's avatar Benjamin Peterson

Merged revisions 88535,88661 via svnmerge from

svn+ssh://pythondev@svn.python.org/sandbox/trunk/2to3/lib2to3

........
  r88535 | brett.cannon | 2011-02-23 13:46:46 -0600 (Wed, 23 Feb 2011) | 1 line

  Add lib2to3.__main__ for easy testing from the console.
........
  r88661 | benjamin.peterson | 2011-02-26 16:06:24 -0600 (Sat, 26 Feb 2011) | 6 lines

  fix refactoring on formfeed characters #11250

  This is because text.splitlines() is not the same as
  list(StringIO.StringIO(text)).
........
parent aeb187a2
# lib2to3.__main__: lets the refactoring tool be run directly with
# "python -m lib2to3" from the console.
import sys
# Relative import: main() is the command-line entry point of this package.
from .main import main
# Delegate to the CLI driver (using the stock fixer package) and propagate
# its integer status code as the process exit status.
sys.exit(main("lib2to3.fixes"))
...@@ -12,6 +12,7 @@ __author__ = "Guido van Rossum <guido@python.org>" ...@@ -12,6 +12,7 @@ __author__ = "Guido van Rossum <guido@python.org>"
# Python imports # Python imports
import os import os
import StringIO
# Fairly local imports # Fairly local imports
from .pgen2 import driver, literals, token, tokenize, parse, grammar from .pgen2 import driver, literals, token, tokenize, parse, grammar
...@@ -32,7 +33,7 @@ class PatternSyntaxError(Exception): ...@@ -32,7 +33,7 @@ class PatternSyntaxError(Exception):
def tokenize_wrapper(input): def tokenize_wrapper(input):
"""Tokenizes a string suppressing significant whitespace.""" """Tokenizes a string suppressing significant whitespace."""
skip = set((token.NEWLINE, token.INDENT, token.DEDENT)) skip = set((token.NEWLINE, token.INDENT, token.DEDENT))
tokens = tokenize.generate_tokens(driver.generate_lines(input).next) tokens = tokenize.generate_tokens(StringIO.StringIO(input).readline)
for quintuple in tokens: for quintuple in tokens:
type, value, start, end, line_text = quintuple type, value, start, end, line_text = quintuple
if type not in skip: if type not in skip:
......
...@@ -19,6 +19,7 @@ __all__ = ["Driver", "load_grammar"] ...@@ -19,6 +19,7 @@ __all__ = ["Driver", "load_grammar"]
import codecs import codecs
import os import os
import logging import logging
import StringIO
import sys import sys
# Pgen imports # Pgen imports
...@@ -101,18 +102,10 @@ class Driver(object): ...@@ -101,18 +102,10 @@ class Driver(object):
def parse_string(self, text, debug=False): def parse_string(self, text, debug=False):
"""Parse a string and return the syntax tree.""" """Parse a string and return the syntax tree."""
tokens = tokenize.generate_tokens(generate_lines(text).next) tokens = tokenize.generate_tokens(StringIO.StringIO(text).readline)
return self.parse_tokens(tokens, debug) return self.parse_tokens(tokens, debug)
def generate_lines(text):
    """Generator that behaves like readline without using StringIO.

    Yields each line of *text* with its trailing newline kept, then
    yields the empty string forever, mimicking file.readline() at EOF.

    Lines are delimited by "\n" only.  Do NOT use str.splitlines()
    here: it also splits on formfeed ("\x0c") and other line-boundary
    characters that readline() keeps inside a line, which broke
    refactoring of sources containing formfeeds (issue #11250).
    """
    start = 0
    size = len(text)
    while start < size:
        end = text.find("\n", start)
        if end < 0:
            # Final line without a trailing newline.
            yield text[start:]
            break
        yield text[start:end + 1]
        start = end + 1
    # Emulate readline() at EOF: keep returning "".
    while True:
        yield ""
def load_grammar(gt="Grammar.txt", gp=None, def load_grammar(gt="Grammar.txt", gp=None,
save=True, force=False, logger=None): save=True, force=False, logger=None):
"""Load the grammar (maybe from a pickle).""" """Load the grammar (maybe from a pickle)."""
......
...@@ -19,6 +19,16 @@ import sys ...@@ -19,6 +19,16 @@ import sys
# Local imports # Local imports
from lib2to3.pgen2 import tokenize from lib2to3.pgen2 import tokenize
from ..pgen2.parse import ParseError from ..pgen2.parse import ParseError
from lib2to3.pygram import python_symbols as syms
class TestDriver(support.TestCase):
    """Regression tests for the pgen2 driver."""

    def test_formfeed(self):
        # Issue #11250: a formfeed (\x0C) at the start of a line used to
        # derail line splitting; both statements must still parse as
        # print statements.
        source = "print 1\n\x0Cprint 2\n"
        tree = driver.parse_string(source)
        for stmt in tree.children[:2]:
            self.assertEqual(stmt.children[0].type, syms.print_stmt)
class GrammarTest(support.TestCase): class GrammarTest(support.TestCase):
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.