cpython / Commits

Commit 2e88ffdf
authored Mar 19, 2008 by Martin v. Löwis
Run 2to3 on this library.
parent 9f9240b8
Showing 22 changed files with 196 additions and 194 deletions (+196 / -194)
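All of the edits below come from the stock 2to3 fixers (print, except, next, dict views, xrange, basestring, __builtin__, and so on). As a rough illustrative sketch only, not part of this commit, the Python 3 spellings that these fixers leave behind for the Python 2 idioms rewritten throughout this diff look like this:

    # Illustrative sketch (not from the commit): Python 3 forms produced by the
    # 2to3 fixers for the Python 2 idioms seen in the hunks below.
    import builtins                                      # was: import __builtin__

    mapping = {"StringIO": ("io", ["StringIO"])}
    for old, (new, members) in list(mapping.items()):    # was: mapping.items() / .iteritems()
        print(old, new, members)                         # was: print old, new, members

    numbers = iter(range(3))                             # was: xrange(3)
    first = next(numbers)                                # was: numbers.next()

    try:
        raise RuntimeError('just testing')               # was: raise RuntimeError, 'just testing'
    except RuntimeError as err:                          # was: except RuntimeError, err:
        print("caught:", err)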
Lib/lib2to3/fixes/basefix.py                  +1   -1
Lib/lib2to3/fixes/fix_imports.py              +3   -3
Lib/lib2to3/fixes/fix_renames.py              +2   -2
Lib/lib2to3/fixes/util.py                     +1   -1
Lib/lib2to3/patcomp.py                        +1   -1
Lib/lib2to3/pgen2/conv.py                     +33  -33
Lib/lib2to3/pgen2/driver.py                   +1   -1
Lib/lib2to3/pgen2/grammar.py                  +6   -6
Lib/lib2to3/pgen2/literals.py                 +1   -1
Lib/lib2to3/pgen2/pgen.py                     +20  -20
Lib/lib2to3/pgen2/token.py                    +1   -1
Lib/lib2to3/pgen2/tokenize.py                 +9   -7
Lib/lib2to3/pygram.py                         +1   -1
Lib/lib2to3/pytree.py                         +3   -3
Lib/lib2to3/refactor.py                       +15  -15
Lib/lib2to3/tests/benchmark.py                +4   -4
Lib/lib2to3/tests/data/py2_test_grammar.py    +69  -69
Lib/lib2to3/tests/data/py3_test_grammar.py    +2   -2
Lib/lib2to3/tests/pytree_idempotency.py       +9   -9
Lib/lib2to3/tests/test_all_fixers.py          +2   -2
Lib/lib2to3/tests/test_fixers.py              +11  -11
Lib/lib2to3/tests/test_parser.py              +1   -1
Lib/lib2to3/fixes/basefix.py
@@ -108,7 +108,7 @@ class BaseFix(object):
         """
         name = template
         while name in self.used_names:
-            name = template + str(self.numbers.next())
+            name = template + str(next(self.numbers))
         self.used_names.add(name)
         return name
Lib/lib2to3/fixes/fix_imports.py
@@ -10,8 +10,8 @@ Fixes:
 # Local imports
 from . import basefix
 from .util import Name, attr_chain, any, set
-import __builtin__
-builtin_names = [name for name in dir(__builtin__)
+import builtins
+builtin_names = [name for name in dir(builtins)
                  if name not in ("__name__", "__doc__")]
 MAPPING = {"StringIO": ("io", ["StringIO"]),
@@ -26,7 +26,7 @@ def alternates(members):
 def build_pattern():
     bare = set()
-    for old_module, (new_module, members) in MAPPING.items():
+    for old_module, (new_module, members) in list(MAPPING.items()):
         bare.add(old_module)
         bare.update(members)
         members = alternates(members)
Lib/lib2to3/fixes/fix_renames.py
@@ -20,8 +20,8 @@ def alternates(members):
 def build_pattern():
     #bare = set()
-    for module, replace in MAPPING.items():
-        for old_attr, new_attr in replace.items():
+    for module, replace in list(MAPPING.items()):
+        for old_attr, new_attr in list(replace.items()):
             LOOKUP[(module, old_attr)] = new_attr
             #bare.add(module)
             #bare.add(old_attr)
Lib/lib2to3/fixes/util.py
@@ -323,7 +323,7 @@ def _is_import_binding(node, name, package=None):
    elif node.type == syms.import_from:
        # unicode(...) is used to make life easier here, because
        # from a.b import parses to ['import', ['a', '.', 'b'], ...]
-       if package and unicode(node.children[1]).strip() != package:
+       if package and str(node.children[1]).strip() != package:
            return None
        n = node.children[3]
        if package and _find('as', n):
Lib/lib2to3/patcomp.py
@@ -31,7 +31,7 @@ _PATTERN_GRAMMAR_FILE = os.path.join(os.path.dirname(__file__),
 def tokenize_wrapper(input):
     """Tokenizes a string suppressing significant whitespace."""
     skip = (token.NEWLINE, token.INDENT, token.DEDENT)
-    tokens = tokenize.generate_tokens(driver.generate_lines(input).next)
+    tokens = tokenize.generate_tokens(driver.generate_lines(input).__next__)
     for quintuple in tokens:
         type, value, start, end, line_text = quintuple
         if type not in skip:
Lib/lib2to3/pgen2/conv.py
@@ -60,8 +60,8 @@ class Converter(grammar.Grammar):
         """
         try:
             f = open(filename)
-        except IOError, err:
-            print "Can't open %s: %s" % (filename, err)
+        except IOError as err:
+            print("Can't open %s: %s" % (filename, err))
             return False
         self.symbol2number = {}
         self.number2symbol = {}
@@ -70,8 +70,8 @@ class Converter(grammar.Grammar):
             lineno += 1
             mo = re.match(r"^#define\s+(\w+)\s+(\d+)$", line)
             if not mo and line.strip():
-                print "%s(%s): can't parse %s" % (filename, lineno,
-                                                  line.strip())
+                print("%s(%s): can't parse %s" % (filename, lineno,
+                                                  line.strip()))
             else:
                 symbol, number = mo.groups()
                 number = int(number)
@@ -111,20 +111,20 @@ class Converter(grammar.Grammar):
         """
         try:
             f = open(filename)
-        except IOError, err:
-            print "Can't open %s: %s" % (filename, err)
+        except IOError as err:
+            print("Can't open %s: %s" % (filename, err))
             return False
         # The code below essentially uses f's iterator-ness!
         lineno = 0
         # Expect the two #include lines
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         assert line == '#include "pgenheaders.h"\n', (lineno, line)
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         assert line == '#include "grammar.h"\n', (lineno, line)
         # Parse the state definitions
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         allarcs = {}
         states = []
         while line.startswith("static arc "):
@@ -132,35 +132,35 @@ class Converter(grammar.Grammar):
                 mo = re.match(r"static arc arcs_(\d+)_(\d+)\[(\d+)\] = {$",
                               line)
                 assert mo, (lineno, line)
-                n, m, k = map(int, mo.groups())
+                n, m, k = list(map(int, mo.groups()))
                 arcs = []
                 for _ in range(k):
-                    lineno, line = lineno+1, f.next()
+                    lineno, line = lineno+1, next(f)
                     mo = re.match(r"\s+{(\d+), (\d+)},$", line)
                     assert mo, (lineno, line)
-                    i, j = map(int, mo.groups())
+                    i, j = list(map(int, mo.groups()))
                     arcs.append((i, j))
-                lineno, line = lineno+1, f.next()
+                lineno, line = lineno+1, next(f)
                 assert line == "};\n", (lineno, line)
                 allarcs[(n, m)] = arcs
-                lineno, line = lineno+1, f.next()
+                lineno, line = lineno+1, next(f)
             mo = re.match(r"static state states_(\d+)\[(\d+)\] = {$", line)
             assert mo, (lineno, line)
-            s, t = map(int, mo.groups())
+            s, t = list(map(int, mo.groups()))
             assert s == len(states), (lineno, line)
             state = []
             for _ in range(t):
-                lineno, line = lineno+1, f.next()
+                lineno, line = lineno+1, next(f)
                 mo = re.match(r"\s+{(\d+), arcs_(\d+)_(\d+)},$", line)
                 assert mo, (lineno, line)
-                k, n, m = map(int, mo.groups())
+                k, n, m = list(map(int, mo.groups()))
                 arcs = allarcs[n, m]
                 assert k == len(arcs), (lineno, line)
                 state.append(arcs)
             states.append(state)
-            lineno, line = lineno+1, f.next()
+            lineno, line = lineno+1, next(f)
             assert line == "};\n", (lineno, line)
-            lineno, line = lineno+1, f.next()
+            lineno, line = lineno+1, next(f)
         self.states = states
         # Parse the dfas
@@ -169,18 +169,18 @@ class Converter(grammar.Grammar):
         assert mo, (lineno, line)
         ndfas = int(mo.group(1))
         for i in range(ndfas):
-            lineno, line = lineno+1, f.next()
+            lineno, line = lineno+1, next(f)
             mo = re.match(r'\s+{(\d+), "(\w+)", (\d+), (\d+), states_(\d+),$',
                           line)
             assert mo, (lineno, line)
             symbol = mo.group(2)
-            number, x, y, z = map(int, mo.group(1, 3, 4, 5))
+            number, x, y, z = list(map(int, mo.group(1, 3, 4, 5)))
             assert self.symbol2number[symbol] == number, (lineno, line)
             assert self.number2symbol[number] == symbol, (lineno, line)
             assert x == 0, (lineno, line)
             state = states[z]
             assert y == len(state), (lineno, line)
-            lineno, line = lineno+1, f.next()
+            lineno, line = lineno+1, next(f)
             mo = re.match(r'\s+("(?:\\\d\d\d)*")},$', line)
             assert mo, (lineno, line)
             first = {}
@@ -191,18 +191,18 @@ class Converter(grammar.Grammar):
                     if byte & (1<<j):
                         first[i*8 + j] = 1
             dfas[number] = (state, first)
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         assert line == "};\n", (lineno, line)
         self.dfas = dfas
         # Parse the labels
         labels = []
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         mo = re.match(r"static label labels\[(\d+)\] = {$", line)
         assert mo, (lineno, line)
         nlabels = int(mo.group(1))
         for i in range(nlabels):
-            lineno, line = lineno+1, f.next()
+            lineno, line = lineno+1, next(f)
             mo = re.match(r'\s+{(\d+), (0|"\w+")},$', line)
             assert mo, (lineno, line)
             x, y = mo.groups()
@@ -212,35 +212,35 @@ class Converter(grammar.Grammar):
             else:
                 y = eval(y)
             labels.append((x, y))
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         assert line == "};\n", (lineno, line)
         self.labels = labels
         # Parse the grammar struct
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         assert line == "grammar _PyParser_Grammar = {\n", (lineno, line)
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         mo = re.match(r"\s+(\d+),$", line)
         assert mo, (lineno, line)
         ndfas = int(mo.group(1))
         assert ndfas == len(self.dfas)
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         assert line == "\tdfas,\n", (lineno, line)
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         mo = re.match(r"\s+{(\d+), labels},$", line)
         assert mo, (lineno, line)
         nlabels = int(mo.group(1))
         assert nlabels == len(self.labels), (lineno, line)
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         mo = re.match(r"\s+(\d+)$", line)
         assert mo, (lineno, line)
         start = int(mo.group(1))
         assert start in self.number2symbol, (lineno, line)
         self.start = start
-        lineno, line = lineno+1, f.next()
+        lineno, line = lineno+1, next(f)
         assert line == "};\n", (lineno, line)
         try:
-            lineno, line = lineno+1, f.next()
+            lineno, line = lineno+1, next(f)
         except StopIteration:
             pass
         else:
Lib/lib2to3/pgen2/driver.py
@@ -99,7 +99,7 @@ class Driver(object):
     def parse_string(self, text, debug=False):
         """Parse a string and return the syntax tree."""
-        tokens = tokenize.generate_tokens(generate_lines(text).next)
+        tokens = tokenize.generate_tokens(generate_lines(text).__next__)
         return self.parse_tokens(tokens, debug)
Lib/lib2to3/pgen2/grammar.py
@@ -100,17 +100,17 @@ class Grammar(object):
     def report(self):
         """Dump the grammar tables to standard output, for debugging."""
         from pprint import pprint
-        print "s2n"
+        print("s2n")
         pprint(self.symbol2number)
-        print "n2s"
+        print("n2s")
         pprint(self.number2symbol)
-        print "states"
+        print("states")
         pprint(self.states)
-        print "dfas"
+        print("dfas")
         pprint(self.dfas)
-        print "labels"
+        print("labels")
         pprint(self.labels)
-        print "start", self.start
+        print("start", self.start)
 # Map from operator to number (since tokenize doesn't do this)
Lib/lib2to3/pgen2/literals.py
@@ -53,7 +53,7 @@ def test():
         s = repr(c)
         e = evalString(s)
         if e != c:
-            print i, c, s, e
+            print(i, c, s, e)
 if __name__ == "__main__":
Lib/lib2to3/pgen2/pgen.py
@@ -26,7 +26,7 @@ class ParserGenerator(object):
     def make_grammar(self):
         c = PgenGrammar()
-        names = self.dfas.keys()
+        names = list(self.dfas.keys())
         names.sort()
         names.remove(self.startsymbol)
         names.insert(0, self.startsymbol)
@@ -39,7 +39,7 @@ class ParserGenerator(object):
         states = []
         for state in dfa:
             arcs = []
-            for label, next in state.arcs.iteritems():
+            for label, next in state.arcs.items():
                 arcs.append((self.make_label(c, label), dfa.index(next)))
             if state.isfinal:
                 arcs.append((0, dfa.index(state)))
@@ -105,7 +105,7 @@ class ParserGenerator(object):
         return ilabel
     def addfirstsets(self):
-        names = self.dfas.keys()
+        names = list(self.dfas.keys())
         names.sort()
         for name in names:
             if name not in self.first:
@@ -118,7 +118,7 @@ class ParserGenerator(object):
         state = dfa[0]
         totalset = {}
         overlapcheck = {}
-        for label, next in state.arcs.iteritems():
+        for label, next in state.arcs.items():
             if label in self.dfas:
                 if label in self.first:
                     fset = self.first[label]
@@ -133,7 +133,7 @@ class ParserGenerator(object):
                 totalset[label] = 1
                 overlapcheck[label] = {label: 1}
         inverse = {}
-        for label, itsfirst in overlapcheck.iteritems():
+        for label, itsfirst in overlapcheck.items():
            for symbol in itsfirst:
                 if symbol in inverse:
                     raise ValueError("rule %s is ambiguous; %s is in the"
@@ -192,7 +192,7 @@ class ParserGenerator(object):
                 for label, next in nfastate.arcs:
                     if label is not None:
                         addclosure(next, arcs.setdefault(label, {}))
-            for label, nfaset in arcs.iteritems():
+            for label, nfaset in arcs.items():
                 for st in states:
                     if st.nfaset == nfaset:
                         break
@@ -203,10 +203,10 @@ class ParserGenerator(object):
         return states # List of DFAState instances; first one is start
     def dump_nfa(self, name, start, finish):
-        print "Dump of NFA for", name
+        print("Dump of NFA for", name)
         todo = [start]
         for i, state in enumerate(todo):
-            print "  State", i, state is finish and "(final)" or ""
+            print("  State", i, state is finish and "(final)" or "")
             for label, next in state.arcs:
                 if next in todo:
                     j = todo.index(next)
@@ -214,16 +214,16 @@ class ParserGenerator(object):
                     j = len(todo)
                     todo.append(next)
                 if label is None:
-                    print "    -> %d" % j
+                    print("    -> %d" % j)
                 else:
-                    print "    %s -> %d" % (label, j)
+                    print("    %s -> %d" % (label, j))
     def dump_dfa(self, name, dfa):
-        print "Dump of DFA for", name
+        print("Dump of DFA for", name)
         for i, state in enumerate(dfa):
-            print "  State", i, state.isfinal and "(final)" or ""
-            for label, next in state.arcs.iteritems():
-                print "    %s -> %d" % (label, dfa.index(next))
+            print("  State", i, state.isfinal and "(final)" or "")
+            for label, next in state.arcs.items():
+                print("    %s -> %d" % (label, dfa.index(next)))
     def simplify_dfa(self, dfa):
         # This is not theoretically optimal, but works well enough.
@@ -319,9 +319,9 @@ class ParserGenerator(object):
         return value
     def gettoken(self):
-        tup = self.generator.next()
+        tup = next(self.generator)
         while tup[0] in (tokenize.COMMENT, tokenize.NL):
-            tup = self.generator.next()
+            tup = next(self.generator)
         self.type, self.value, self.begin, self.end, self.line = tup
         #print token.tok_name[self.type], repr(self.value)
@@ -330,7 +330,7 @@ class ParserGenerator(object):
         try:
             msg = msg % args
         except:
-            msg = " ".join([msg] + map(str, args))
+            msg = " ".join([msg] + list(map(str, args)))
         raise SyntaxError(msg, (self.filename, self.end[0],
                                 self.end[1], self.line))
@@ -348,7 +348,7 @@ class DFAState(object):
     def __init__(self, nfaset, final):
         assert isinstance(nfaset, dict)
-        assert isinstance(iter(nfaset).next(), NFAState)
+        assert isinstance(next(iter(nfaset)), NFAState)
         assert isinstance(final, NFAState)
         self.nfaset = nfaset
         self.isfinal = final in nfaset
@@ -361,7 +361,7 @@ class DFAState(object):
         self.arcs[label] = next
     def unifystate(self, old, new):
-        for label, next in self.arcs.iteritems():
+        for label, next in self.arcs.items():
             if next is old:
                 self.arcs[label] = new
@@ -374,7 +374,7 @@ class DFAState(object):
         # would invoke this method recursively, with cycles...
         if len(self.arcs) != len(other.arcs):
             return False
-        for label, next in self.arcs.iteritems():
+        for label, next in self.arcs.items():
             if next is not other.arcs.get(label):
                 return False
         return True
Lib/lib2to3/pgen2/token.py
@@ -67,7 +67,7 @@ NT_OFFSET = 256
 #--end constants--
 tok_name = {}
-for _name, _value in globals().items():
+for _name, _value in list(globals().items()):
     if type(_value) is type(0):
         tok_name[_value] = _name
Lib/lib2to3/pgen2/tokenize.py
@@ -94,8 +94,8 @@ ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
 PseudoExtras = group(r'\\\r?\n', Comment, Triple)
 PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
-tokenprog, pseudoprog, single3prog, double3prog = map(
-    re.compile, (Token, PseudoToken, Single3, Double3))
+tokenprog, pseudoprog, single3prog, double3prog = list(map(
+    re.compile, (Token, PseudoToken, Single3, Double3)))
 endprogs = {"'": re.compile(Single), '"': re.compile(Double),
             "'''": single3prog, '"""': double3prog,
             "r'''": single3prog, 'r"""': double3prog,
@@ -143,9 +143,11 @@ class TokenError(Exception): pass
 class StopTokenizing(Exception): pass
-def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
-    print "%d,%d-%d,%d:\t%s\t%s" % \
-        (srow, scol, erow, ecol, tok_name[type], repr(token))
+def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line): # for testing
+    (srow, scol) = xxx_todo_changeme
+    (erow, ecol) = xxx_todo_changeme1
+    print("%d,%d-%d,%d:\t%s\t%s" % \
+        (srow, scol, erow, ecol, tok_name[type], repr(token)))
 def tokenize(readline, tokeneater=printtoken):
     """
@@ -279,7 +281,7 @@ def generate_tokens(readline):
         if contstr:                            # continued string
             if not line:
-                raise TokenError, ("EOF in multi-line string", strstart)
+                raise TokenError("EOF in multi-line string", strstart)
             endmatch = endprog.match(line)
             if endmatch:
                 pos = end = endmatch.end(0)
@@ -335,7 +337,7 @@ def generate_tokens(readline):
         else:                                  # continued statement
             if not line:
-                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
+                raise TokenError("EOF in multi-line statement", (lnum, 0))
             continued = 0
             while pos < max:
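The printtoken() rewrite above comes from 2to3's tuple-parameter fixer (fix_tuple_params): Python 3 no longer allows tuple unpacking in a function signature, so the fixer replaces each tuple parameter with a placeholder name (xxx_todo_changeme here) and unpacks it at the top of the body. A minimal sketch of the pattern, with hypothetical names rather than the ones 2to3 generates:

    # Hypothetical example of the tuple-parameter rewrite (names are illustrative).
    # Python 2 accepted:  def area((width, height)): return width * height
    def area(size):                 # the tuple parameter becomes a plain name
        (width, height) = size      # ...and is unpacked as the first statement
        return width * height

    print(area((3, 4)))             # -> 12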
Lib/lib2to3/pygram.py
@@ -23,7 +23,7 @@ class Symbols(object):
         Creates an attribute for each grammar symbol (nonterminal),
         whose value is the symbol's type (an int >= 256).
         """
-        for name, symbol in grammar.symbol2number.iteritems():
+        for name, symbol in grammar.symbol2number.items():
             setattr(self, name, symbol)
Lib/lib2to3/pytree.py
@@ -443,7 +443,7 @@ class LeafPattern(BasePattern):
         if type is not None:
             assert 0 <= type < 256, type
         if content is not None:
-            assert isinstance(content, basestring), repr(content)
+            assert isinstance(content, str), repr(content)
         self.type = type
         self.content = content
         self.name = name
@@ -491,7 +491,7 @@ class NodePattern(BasePattern):
         if type is not None:
             assert type >= 256, type
         if content is not None:
-            assert not isinstance(content, basestring), repr(content)
+            assert not isinstance(content, str), repr(content)
             content = list(content)
             for i, item in enumerate(content):
                 assert isinstance(item, BasePattern), (i, item)
@@ -622,7 +622,7 @@ class WildcardPattern(BasePattern):
         """
         if self.content is None:
             # Shortcut for special case (see __init__.__doc__)
-            for count in xrange(self.min, 1 + min(len(nodes), self.max)):
+            for count in range(self.min, 1 + min(len(nodes), self.max)):
                 r = {}
                 if self.name:
                     r[self.name] = nodes[:count]
Lib/lib2to3/refactor.py
@@ -63,14 +63,14 @@ def main(args=None):
     # Parse command line arguments
     options, args = parser.parse_args(args)
     if options.list_fixes:
-        print "Available transformations for the -f/--fix option:"
+        print("Available transformations for the -f/--fix option:")
         for fixname in get_all_fix_names():
-            print fixname
+            print(fixname)
         if not args:
             return 0
     if not args:
-        print >>sys.stderr, "At least one file or directory argument required."
-        print >>sys.stderr, "Use --help to show usage."
+        print("At least one file or directory argument required.", file=sys.stderr)
+        print("Use --help to show usage.", file=sys.stderr)
         return 2
     # Initialize the refactoring tool
@@ -145,7 +145,7 @@ class RefactoringTool(object):
                 continue
             try:
                 fixer = fix_class(self.options, self.fixer_log)
-            except Exception, err:
+            except Exception as err:
                 self.log_error("Can't instantiate fixes.fix_%s.%s()",
                                fix_name, class_name, exc_info=True)
                 continue
@@ -207,7 +207,7 @@ class RefactoringTool(object):
         """Refactors a file."""
         try:
             f = open(filename)
-        except IOError, err:
+        except IOError as err:
             self.log_error("Can't open %s: %s", filename, err)
             return
         try:
@@ -243,7 +243,7 @@ class RefactoringTool(object):
         """
         try:
             tree = self.driver.parse_string(data, 1)
-        except Exception, err:
+        except Exception as err:
             self.log_error("Can't parse %s: %s: %s",
                            name, err.__class__.__name__, err)
             return
@@ -331,7 +331,7 @@ class RefactoringTool(object):
         if old_text is None:
             try:
                 f = open(filename, "r")
-            except IOError, err:
+            except IOError as err:
                 self.log_error("Can't read %s: %s", filename, err)
                 return
             try:
@@ -351,21 +351,21 @@ class RefactoringTool(object):
         if os.path.lexists(backup):
             try:
                 os.remove(backup)
-            except os.error, err:
+            except os.error as err:
                 self.log_message("Can't remove backup %s", backup)
         try:
             os.rename(filename, backup)
-        except os.error, err:
+        except os.error as err:
             self.log_message("Can't rename %s to %s", filename, backup)
         try:
             f = open(filename, "w")
-        except os.error, err:
+        except os.error as err:
             self.log_error("Can't create %s: %s", filename, err)
             return
         try:
             try:
                 f.write(new_text)
-            except os.error, err:
+            except os.error as err:
                 self.log_error("Can't write %s: %s", filename, err)
         finally:
             f.close()
@@ -428,7 +428,7 @@ class RefactoringTool(object):
         """
        try:
            tree = self.parse_block(block, lineno, indent)
-       except Exception, err:
+       except Exception as err:
            if self.options.verbose:
                for line in block:
                    self.log_message("Source: %s", line.rstrip("\n"))
@@ -480,7 +480,7 @@ class RefactoringTool(object):
     def wrap_toks(self, block, lineno, indent):
         """Wraps a tokenize stream to systematically modify start/end."""
-        tokens = tokenize.generate_tokens(self.gen_lines(block, indent).next)
+        tokens = tokenize.generate_tokens(self.gen_lines(block, indent).__next__)
         for type, value, (line0, col0), (line1, col1), line_text in tokens:
             line0 += lineno - 1
             line1 += lineno - 1
@@ -519,7 +519,7 @@ def diff_texts(a, b, filename):
     for line in difflib.unified_diff(a, b, filename, filename,
                                      "(original)", "(refactored)",
                                      lineterm=""):
-        print line
+        print(line)
 if __name__ == "__main__":
Lib/lib2to3/tests/benchmark.py
@@ -23,7 +23,7 @@ from .. import refactor
 ###############################################################################
 class Options:
     def __init__(self, **kwargs):
-        for k, v in kwargs.items():
+        for k, v in list(kwargs.items()):
             setattr(self, k, v)
         self.verbose = False
@@ -34,7 +34,7 @@ def dummy_transform(*args, **kwargs):
 ### Collect list of modules to match against
 ###############################################################################
 files = []
-for mod in sys.modules.values():
+for mod in list(sys.modules.values()):
     if mod is None or not hasattr(mod, '__file__'):
         continue
     f = mod.__file__
@@ -53,6 +53,6 @@ for fixer in refactor.fixers:
 t = time()
 for f in files:
-    print "Matching", f
+    print("Matching", f)
     refactor.refactor_file(f)
-print "%d seconds to match %d files" % (time() - t, len(sys.modules))
+print("%d seconds to match %d files" % (time() - t, len(sys.modules)))
Lib/lib2to3/tests/data/py2_test_grammar.py
(This diff is collapsed and not shown here.)
Lib/lib2to3/tests/data/py3_test_grammar.py
@@ -32,7 +32,7 @@ class TokenTests(unittest.TestCase):
         self.assertEquals(0o377, 255)
         self.assertEquals(2147483647, 0o17777777777)
         self.assertEquals(0b1001, 9)
-        from sys import maxint
+        from sys import maxsize
         if maxint == 2147483647:
             self.assertEquals(-2147483647-1, -0o20000000000)
             # XXX -2147483648
@@ -438,7 +438,7 @@ class GrammarTests(unittest.TestCase):
     def testRaise(self):
         # 'raise' test [',' test]
-        try: raise RuntimeError, 'just testing'
+        try: raise RuntimeError('just testing')
         except RuntimeError: pass
         try: raise KeyboardInterrupt
         except KeyboardInterrupt: pass
Lib/lib2to3/tests/pytree_idempotency.py
@@ -28,7 +28,7 @@ def main():
     fn = "example.py"
     tree = dr.parse_file(fn, debug=True)
     if not diff(fn, tree):
-        print "No diffs."
+        print("No diffs.")
     if not sys.argv[1:]:
         return # Pass a dummy argument to run the complete test suite below
@@ -44,7 +44,7 @@ def main():
            fn = fn[:-1]
        if not fn.endswith(".py"):
            continue
-       print >>sys.stderr, "Parsing", fn
+       print("Parsing", fn, file=sys.stderr)
        tree = dr.parse_file(fn, debug=True)
        if diff(fn, tree):
            problems.append(fn)
@@ -55,27 +55,27 @@ def main():
            names = os.listdir(dir)
        except os.error:
            continue
-       print >>sys.stderr, "Scanning", dir, "..."
+       print("Scanning", dir, "...", file=sys.stderr)
        for name in names:
            if not name.endswith(".py"):
                continue
-           print >>sys.stderr, "Parsing", name
+           print("Parsing", name, file=sys.stderr)
            fn = os.path.join(dir, name)
            try:
                tree = dr.parse_file(fn, debug=True)
-           except pgen2.parse.ParseError, err:
-               print "ParseError:", err
+           except pgen2.parse.ParseError as err:
+               print("ParseError:", err)
            else:
                if diff(fn, tree):
                    problems.append(fn)
    # Show summary of problem files
    if not problems:
-       print "No problems. Congratulations!"
+       print("No problems. Congratulations!")
    else:
-       print "Problems in following files:"
+       print("Problems in following files:")
        for fn in problems:
-           print "***", fn
+           print("***", fn)
 def diff(fn, tree):
    f = open("@", "w")
Lib/lib2to3/tests/test_all_fixers.py
@@ -21,7 +21,7 @@ from .. import refactor
 class Options:
     def __init__(self, **kwargs):
-        for k, v in kwargs.items():
+        for k, v in list(kwargs.items()):
             setattr(self, k, v)
         self.verbose = False
@@ -33,7 +33,7 @@ class Test_all(support.TestCase):
     def test_all_project_files(self):
         for filepath in support.all_project_files():
-            print "Fixing %s..." % filepath
+            print("Fixing %s..." % filepath)
             self.refactor.refactor_string(open(filepath).read(), filepath)
Lib/lib2to3/tests/test_fixers.py
@@ -18,7 +18,7 @@ from .. import refactor
 class Options:
     def __init__(self, **kwargs):
-        for k, v in kwargs.items():
+        for k, v in list(kwargs.items()):
             setattr(self, k, v)
         self.verbose = False
@@ -1285,7 +1285,7 @@ class Test_imports(FixerTestCase):
         }
     def test_import_module(self):
-        for old, (new, members) in self.modules.items():
+        for old, (new, members) in list(self.modules.items()):
            b = "import %s" % old
            a = "import %s" % new
            self.check(b, a)
@@ -1295,7 +1295,7 @@ class Test_imports(FixerTestCase):
            self.check(b, a)
     def test_import_from(self):
-        for old, (new, members) in self.modules.items():
+        for old, (new, members) in list(self.modules.items()):
            for member in members:
                b = "from %s import %s" % (old, member)
                a = "from %s import %s" % (new, member)
@@ -1305,7 +1305,7 @@ class Test_imports(FixerTestCase):
            self.unchanged(s)
     def test_import_module_as(self):
-        for old, (new, members) in self.modules.items():
+        for old, (new, members) in list(self.modules.items()):
            b = "import %s as foo_bar" % old
            a = "import %s as foo_bar" % new
            self.check(b, a)
@@ -1315,7 +1315,7 @@ class Test_imports(FixerTestCase):
            self.check(b, a)
     def test_import_from_as(self):
-        for old, (new, members) in self.modules.items():
+        for old, (new, members) in list(self.modules.items()):
            for member in members:
                b = "from %s import %s as foo_bar" % (old, member)
                a = "from %s import %s as foo_bar" % (new, member)
@@ -1327,7 +1327,7 @@ class Test_imports(FixerTestCase):
            self.warns_unchanged(s, "Cannot handle star imports")
     def test_import_module_usage(self):
-        for old, (new, members) in self.modules.items():
+        for old, (new, members) in list(self.modules.items()):
            for member in members:
                b = """
                    import %s
@@ -1340,7 +1340,7 @@ class Test_imports(FixerTestCase):
                self.check(b, a)
     def test_from_import_usage(self):
-        for old, (new, members) in self.modules.items():
+        for old, (new, members) in list(self.modules.items()):
            for member in members:
                b = """
                    from %s import %s
@@ -2211,7 +2211,7 @@ class Test_renames(FixerTestCase):
         }
     def test_import_from(self):
-        for mod, (old, new) in self.modules.items():
+        for mod, (old, new) in list(self.modules.items()):
            b = "from %s import %s" % (mod, old)
            a = "from %s import %s" % (mod, new)
            self.check(b, a)
@@ -2220,13 +2220,13 @@ class Test_renames(FixerTestCase):
            self.unchanged(s)
     def test_import_from_as(self):
-        for mod, (old, new) in self.modules.items():
+        for mod, (old, new) in list(self.modules.items()):
            b = "from %s import %s as foo_bar" % (mod, old)
            a = "from %s import %s as foo_bar" % (mod, new)
            self.check(b, a)
     def test_import_module_usage(self):
-        for mod, (old, new) in self.modules.items():
+        for mod, (old, new) in list(self.modules.items()):
            b = """
                import %s
                foo(%s, %s.%s)
@@ -2239,7 +2239,7 @@ class Test_renames(FixerTestCase):
     def XXX_test_from_import_usage(self):
         # not implemented yet
-        for mod, (old, new) in self.modules.items():
+        for mod, (old, new) in list(self.modules.items()):
            b = """
                from %s import %s
                foo(%s, %s)
Lib/lib2to3/tests/test_parser.py
@@ -149,7 +149,7 @@ class TestParserIdempotency(support.TestCase):
     def test_all_project_files(self):
         for filepath in support.all_project_files():
-            print "Parsing %s..." % filepath
+            print("Parsing %s..." % filepath)
             tree = driver.parse_file(filepath, debug=True)
             if diff(filepath, tree):
                 self.fail("Idempotency failed: %s" % filepath)