author    Victor Stinner <victor.stinner@gmail.com>  2015-05-25 22:43:58 (GMT)
committer Victor Stinner <victor.stinner@gmail.com>  2015-05-25 22:43:58 (GMT)
commit    387729e183365a366c48fce7a9abfcaf4ec6ff4e (patch)
tree      f04fdaba2e72148ae51f2cb9778f64d3bea4f7e8 /Lib
parent    410d77f230078900371d67eaec9ce190a67828ee (diff)
Issue #23840: tokenize.open() now closes the temporary binary file on error to
fix a resource warning.
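
The failure mode, as a minimal sketch (not part of the commit): a file whose
coding cookie names an unknown encoding makes detect_encoding() raise
SyntaxError from inside tokenize.open(). Before this fix, the binary buffer
opened for the encoding probe was left for the garbage collector, which
reports it as an unclosed file:

    import gc
    import os
    import tempfile
    import tokenize
    import warnings

    # Write a source file with a bogus encoding cookie.
    fd, path = tempfile.mkstemp(suffix='.py')
    os.write(fd, b'# coding: xxx\n')
    os.close(fd)

    try:
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter('always', ResourceWarning)
            try:
                tokenize.open(path)  # SyntaxError: unknown encoding: xxx
            except SyntaxError:
                pass
            gc.collect()  # finalize a leaked buffer, if any
        # Without the fix, 'caught' holds an "unclosed file" ResourceWarning;
        # with it, the list is empty.
        print([str(w.message) for w in caught])
    finally:
        os.unlink(path)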
Diffstat (limited to 'Lib')
-rw-r--r--  Lib/test/test_tokenize.py |  10
-rw-r--r--  Lib/tokenize.py           |  14
2 files changed, 18 insertions(+), 6 deletions(-)
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 4a8be3b..9842207 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -646,7 +646,7 @@ from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                      STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
                      open as tokenize_open, Untokenizer)
 from io import BytesIO
-from unittest import TestCase
+from unittest import TestCase, mock
 import os, sys, glob
 import token
@@ -1058,6 +1058,14 @@ class TestDetectEncoding(TestCase):
         ins = Bunk(lines, path)
         detect_encoding(ins.readline)
 
+    def test_open_error(self):
+        # Issue #23840: open() must close the binary file on error
+        m = BytesIO(b'#coding:xxx')
+        with mock.patch('tokenize._builtin_open', return_value=m):
+            self.assertRaises(SyntaxError, tokenize_open, 'foobar')
+        self.assertTrue(m.closed)
+
+
 class TestTokenize(TestCase):
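
The new test avoids the filesystem entirely: mock.patch() replaces
tokenize._builtin_open (the module's saved reference to builtins.open) with a
mock returning an in-memory BytesIO, so the bogus cookie drives open() down
the SyntaxError path and the buffer's closed flag can be checked afterwards.
The same check as a standalone sketch, assuming a tokenize with this patch
applied:

    import io
    import tokenize
    from unittest import mock

    buf = io.BytesIO(b'#coding:xxx')  # bogus cookie, no real file needed
    with mock.patch('tokenize._builtin_open', return_value=buf):
        try:
            tokenize.open('ignored')  # filename is irrelevant: open is mocked
        except SyntaxError:
            pass
    assert buf.closed  # holds only with the fix applied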
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index ed4153c..cf18bf9 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -435,11 +435,15 @@ def open(filename):
     detect_encoding().
     """
     buffer = _builtin_open(filename, 'rb')
-    encoding, lines = detect_encoding(buffer.readline)
-    buffer.seek(0)
-    text = TextIOWrapper(buffer, encoding, line_buffering=True)
-    text.mode = 'r'
-    return text
+    try:
+        encoding, lines = detect_encoding(buffer.readline)
+        buffer.seek(0)
+        text = TextIOWrapper(buffer, encoding, line_buffering=True)
+        text.mode = 'r'
+        return text
+    except:
+        buffer.close()
+        raise
 
 
 def tokenize(readline):
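
The fix wraps everything after _builtin_open() in try/except rather than
try/finally because the buffer must stay open on success: once the
TextIOWrapper is returned, it owns the buffer and will close it. The bare
except (equivalent to catching BaseException) guarantees the descriptor is
released even on KeyboardInterrupt, before the exception is re-raised. A
hedged sketch of the same open-probe-wrap pattern for caller code; probe and
wrap are hypothetical stand-ins, not CPython APIs:

    def open_probed(path, probe, wrap):
        """Open path in binary mode, probe it, then hand the stream to a
        wrapper; close the stream if anything in between fails."""
        stream = open(path, 'rb')
        try:
            info = probe(stream)       # may raise, e.g. on a bad header
            stream.seek(0)             # rewind so the wrapper sees everything
            return wrap(stream, info)  # on success the wrapper owns the stream
        except BaseException:
            stream.close()             # no leaked descriptor on failure
            raise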