Commit 21db77e3 authored by Benjamin Peterson

simplify by using itertools.chain()

parent 7dc72cc1
@@ -377,17 +377,12 @@ def tokenize(readline):
     The first token sequence will always be an ENCODING token
     which tells you which encoding was used to decode the bytes stream.
     """
+    # This import is here to avoid problems when the itertools module is not
+    # built yet and tokenize is imported.
+    from itertools import chain
     encoding, consumed = detect_encoding(readline)
-    def readline_generator(consumed):
-        for line in consumed:
-            yield line
-        while True:
-            try:
-                yield readline()
-            except StopIteration:
-                return
-    chained = readline_generator(consumed)
-    return _tokenize(chained.__next__, encoding)
+    rl_iter = iter(readline, "")
+    return _tokenize(chain(consumed, rl_iter).__next__, encoding)
 
 
 def _tokenize(readline, encoding):
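The replacement relies on the two-argument form of iter(): iter(readline, sentinel) calls readline() repeatedly until it returns the sentinel, and chain() prepends the lines that detect_encoding() already buffered. A minimal standalone sketch of the same pattern, using io.BytesIO and made-up sample input rather than tokenize itself:

    import io
    from itertools import chain

    # Fake source stream; pretend detect_encoding() already read the first two lines.
    buf = io.BytesIO(b"# coding: utf-8\nimport sys\nprint(sys.path)\n")
    consumed = [buf.readline(), buf.readline()]  # lines buffered during detection
    readline = buf.readline

    # iter(readline, b"") keeps calling readline() until it returns b"" (EOF),
    # so the chained iterator yields the buffered lines first, then the rest.
    rl_iter = iter(readline, b"")
    for line in chain(consumed, rl_iter):
        print(line)

In this sketch the chained iterator does the same job as the removed readline_generator: exhaustion is signalled by the sentinel value returned at end of stream instead of by catching StopIteration from readline().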