| | | |
|---|---|---|
| author | Brian Curtin <brian.curtin@gmail.com> | 2010-10-30 21:37:28 (GMT) |
| committer | Brian Curtin <brian.curtin@gmail.com> | 2010-10-30 21:37:28 (GMT) |
| commit | a0ba0f38df7a18206f109f575d69f1eef8a3a356 (patch) | |
| tree | 0917602842e06d617e013762e69f059dd9b1817b | |
| parent | 32105f401d25d195a0668b56140d9fdfae439ca1 (diff) | |
Merged revisions 85990 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/py3k
........
r85990 | brian.curtin | 2010-10-30 16:35:28 -0500 (Sat, 30 Oct 2010) | 2 lines
Fix #10258 - clean up resource warning
........
| Mode | Path | Lines changed |
|---|---|---|
| -rw-r--r-- | Lib/test/test_tokenize.py | 6 |
1 files changed, 4 insertions, 2 deletions
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 5a25251..482af94 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -592,8 +592,10 @@ def roundtrip(f):
     """
     if isinstance(f, str):
         f = BytesIO(f.encode('utf-8'))
-    token_list = list(tokenize(f.readline))
-    f.close()
+    try:
+        token_list = list(tokenize(f.readline))
+    finally:
+        f.close()
     tokens1 = [tok[:2] for tok in token_list]
     new_bytes = untokenize(tokens1)
     readline = (line for line in new_bytes.splitlines(1)).__next__
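For context, a minimal sketch of what the patch buys; this is a simplified stand-in for the test's `roundtrip()` helper, not the full function from `Lib/test/test_tokenize.py`. The point is that with `try`/`finally`, the file object is closed even if `tokenize()` raises partway through, so the test run no longer leaves unclosed file objects behind (the source of the ResourceWarning when real files from `open(..., 'rb')` are passed in).

```python
# A minimal sketch, assuming a simplified stand-in for the test helper.
from io import BytesIO
from tokenize import tokenize, untokenize

def roundtrip(f):
    """Tokenize f and convert the tokens back into source bytes.

    f.close() now runs even when tokenize() raises (e.g. on malformed
    input), which is what avoids the unclosed-file ResourceWarning.
    """
    if isinstance(f, str):
        f = BytesIO(f.encode('utf-8'))
    try:
        token_list = list(tokenize(f.readline))
    finally:
        f.close()
    tokens = [tok[:2] for tok in token_list]   # keep only (type, string)
    return untokenize(tokens)

print(roundtrip("if x:\n    pass\n"))      # round-trip a source string
print(roundtrip(open(__file__, 'rb')))     # round-trip an open binary file
```

Without the `finally`, an exception raised inside `tokenize()` would skip the `f.close()` call and the open file would only be reclaimed by the garbage collector, which is exactly the situation Python 3.2+ flags with a ResourceWarning.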