author    Victor Stinner <victor.stinner@gmail.com>  2014-12-05 09:17:10 (GMT)
committer Victor Stinner <victor.stinner@gmail.com>  2014-12-05 09:17:10 (GMT)
commit    969175091c4556e5b7e128ba91ae39f0b80153af (patch)
tree      374d5bca899dec2a8531e679ec59680754c343ba /Lib
parent    3ab745e3867f2ac1ecc5520c7e347959c8d79945 (diff)
Issue #22599: Enhance tokenize.open() so that it can be called during Python
finalization. Previously, the module kept a reference to the builtins module, but
module attributes are cleared during Python finalization. Instead, keep a direct
reference to the open() function. This enhancement is not perfect: calling
tokenize.open() can still fail if it is called very late during Python
finalization. Usually, the function is called by the linecache module, which is
used to display a traceback or emit a warning.
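
The fix is an instance of a small, general pattern: bind a builtin at module
import time so the reference survives the clearing of module attributes at
interpreter shutdown. A minimal sketch of the idea (the open_binary() helper
is hypothetical, not part of the patch):

    _builtin_open = open  # captured at import time, before finalization

    def open_binary(filename):
        # No attribute lookup on the builtins module at call time, so this
        # keeps working even after module attributes have been set to None
        # during interpreter finalization.
        return _builtin_open(filename, 'rb')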
Diffstat (limited to 'Lib')
-rw-r--r--  Lib/tokenize.py  7
1 file changed, 4 insertions(+), 3 deletions(-)
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 98e9122..5b47ebd 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -24,7 +24,6 @@ __author__ = 'Ka-Ping Yee <ping@lfw.org>'
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
'Michael Foord')
-import builtins
from codecs import lookup, BOM_UTF8
import collections
from io import TextIOWrapper
@@ -430,11 +429,13 @@ def detect_encoding(readline):
return default, [first, second]
+_builtin_open = open
+
def open(filename):
"""Open a file in read only mode using the encoding detected by
detect_encoding().
"""
- buffer = builtins.open(filename, 'rb')
+ buffer = _builtin_open(filename, 'rb')
encoding, lines = detect_encoding(buffer.readline)
buffer.seek(0)
text = TextIOWrapper(buffer, encoding, line_buffering=True)
@@ -657,7 +658,7 @@ def main():
# Tokenize the input
if args.filename:
filename = args.filename
- with builtins.open(filename, 'rb') as f:
+ with _builtin_open(filename, 'rb') as f:
tokens = list(tokenize(f.readline))
else:
filename = "<stdin>"
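
For reference, a short usage sketch of the public function touched by this
patch ('example.py' is a placeholder path): tokenize.open() detects the source
encoding from a BOM or a PEP 263 coding cookie and returns a text-mode wrapper
that decodes with that encoding.

    import tokenize

    # 'example.py' is a placeholder path used for illustration only.
    with tokenize.open('example.py') as f:
        print(f.encoding)   # encoding detected via BOM or coding cookie
        source = f.read()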