Commit da99d1cb authored by Raymond Hettinger's avatar Raymond Hettinger

SF bug #1224621: tokenize module does not detect inconsistent dedents

parent 8fa7eb56
from test.test_support import verbose, findfile, is_resource_enabled from test.test_support import verbose, findfile, is_resource_enabled, TestFailed
import os, glob, random import os, glob, random
from tokenize import (tokenize, generate_tokens, untokenize, from tokenize import (tokenize, generate_tokens, untokenize,
NUMBER, NAME, OP, STRING) NUMBER, NAME, OP, STRING)
...@@ -41,6 +41,24 @@ for f in testfiles: ...@@ -41,6 +41,24 @@ for f in testfiles:
test_roundtrip(f) test_roundtrip(f)
###### Test detection of IndentationError ######################

from cStringIO import StringIO

# "baz" dedents to column 6, which matches no outer indentation level
# (0 for the module, 8 for the function body), so generate_tokens()
# must raise IndentationError ("unindent does not match any outer
# indentation level").
# NOTE(review): the scraped page stripped leading whitespace; the
# indentation below is restored so the sample actually misbehaves.
sampleBadText = """
def foo():
        bar
      baz
"""

try:
    for tok in generate_tokens(StringIO(sampleBadText).readline):
        pass
except IndentationError:
    pass
else:
    raise TestFailed("Did not detect IndentationError:")
###### Test example in the docs ###############################
......
...@@ -271,6 +271,9 @@ def generate_tokens(readline): ...@@ -271,6 +271,9 @@ def generate_tokens(readline):
indents.append(column) indents.append(column)
yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
while column < indents[-1]: while column < indents[-1]:
if column not in indents:
raise IndentationError(
"unindent does not match any outer indentation level")
indents = indents[:-1] indents = indents[:-1]
yield (DEDENT, '', (lnum, pos), (lnum, pos), line) yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
......
...@@ -147,6 +147,9 @@ Extension Modules ...@@ -147,6 +147,9 @@ Extension Modules
Library
-------
- The tokenize module now detects and reports indentation errors.
Bug #1224621.
- The tokenize module has a new untokenize() function to support a full
  roundtrip from lexed tokens back to Python source code. In addition,
  the generate_tokens() function now accepts a callable argument that
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment