Commit b9d10d08 authored by Alexander Belopolsky

Issue #10386: Added __all__ to token module; this simplifies importing
in tokenize module and prevents leaking of private names through
import *.
parent bb27c128
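
For context (not part of the patch): with "from mod import *", Python imports the names listed in the module's __all__ when it is defined; without __all__, it imports every global whose name does not start with an underscore, which can drag in helper modules and other internals. A minimal sketch, using a hypothetical module mod.py:

# mod.py -- hypothetical module, for illustration only
import re                      # without __all__, "import *" would also export this

__all__ = ['TOKEN', 'is_eof']  # explicit public interface

TOKEN = 42

def is_eof(x):
    return x == 0

# hypothetical consumer:
#   from mod import *
#   TOKEN, is_eof  -> imported
#   re             -> NameError when used: not listed in __all__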
#! /usr/bin/env python3
"""Token constants (from "token.h")."""
+__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']
# This file is automatically generated; please don't muck it up!
#
# To update the symbols in this file, 'cd' to the top directory of
@@ -68,12 +68,10 @@ N_TOKENS = 55
NT_OFFSET = 256
#--end constants--
-tok_name = {}
-for _name, _value in list(globals().items()):
-    if type(_value) is type(0):
-        tok_name[_value] = _name
-del _name, _value
+tok_name = {value: name
+            for name, value in globals().items()
+            if isinstance(value, int)}
+__all__.extend(tok_name.values())
def ISTERMINAL(x):
    return x < NT_OFFSET
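
A hedged sketch (not part of the diff) of what the replacement comprehension builds: it inverts the module's integer constants into a number-to-name table, and the extend() call then publishes every token name through __all__. The three constants below are only an excerpt of the real module:

__all__ = ['tok_name', 'ISTERMINAL', 'ISNONTERMINAL', 'ISEOF']

ENDMARKER = 0
NAME = 1
NUMBER = 2

# Map each integer constant back to its name.
tok_name = {value: name
            for name, value in globals().items()
            if isinstance(value, int)}
# tok_name == {0: 'ENDMARKER', 1: 'NAME', 2: 'NUMBER'}

# Every token name becomes part of the public interface.
__all__.extend(tok_name.values())
# __all__ now ends with 'ENDMARKER', 'NAME', 'NUMBER'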
@@ -85,7 +83,7 @@ def ISEOF(x):
    return x == ENDMARKER
-def main():
+def _main():
    import re
    import sys
    args = sys.argv[1:]
@@ -139,4 +137,4 @@ def main():
if __name__ == "__main__":
-    main()
+    _main()
@@ -33,9 +33,8 @@ from io import TextIOWrapper
cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
import token
-__all__ = [x for x in dir(token) if not x.startswith("_")]
-__all__.extend(["COMMENT", "tokenize", "detect_encoding", "NL", "untokenize",
-                "ENCODING", "TokenInfo"])
+__all__ = token.__all__ + ["COMMENT", "tokenize", "detect_encoding",
+                           "NL", "untokenize", "ENCODING", "TokenInfo"]
del token
COMMENT = N_TOKENS
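
Not part of the diff -- a quick check one could run against the patched modules: because tokenize now builds its __all__ from token.__all__ plus its own additions, every public token name is re-exported and the tokenize-specific names stay listed.

import token
import tokenize

# token's public names are a subset of tokenize's ...
assert set(token.__all__) <= set(tokenize.__all__)

# ... and the tokenize-specific additions are exported as well.
for extra in ("COMMENT", "NL", "ENCODING", "TokenInfo",
              "tokenize", "detect_encoding", "untokenize"):
    assert extra in tokenize.__all__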
@@ -63,6 +63,10 @@ Core and Builtins
Library
-------
+- Issue #10386: Add __all__ to token module; this simplifies importing
+  in tokenize module and prevents leaking of private names through
+  import *.

- Issue #4471: Properly shutdown socket in IMAP.shutdown(). Patch by
  Lorenzo M. Catucci.