author     Raymond Hettinger <python@rcn.com>    2005-06-21 07:43:58 (GMT)
committer  Raymond Hettinger <python@rcn.com>    2005-06-21 07:43:58 (GMT)
commit     da99d1cbfeedafd41263ac2d1b397d57c14ab28e (patch)
tree       998e22dfe11672ffa2d7c6ee69b4af1e59914349
parent     8fa7eb563bb9a14651bcdc8ee60c5e45302c2f59 (diff)
SF bug #1224621: tokenize module does not detect inconsistent dedents
-rw-r--r--  Lib/test/test_tokenize.py | 20 +++++++++++++++++++-
-rw-r--r--  Lib/tokenize.py           |  3 +++
-rw-r--r--  Misc/NEWS                 |  3 +++
3 files changed, 25 insertions(+), 1 deletion(-)
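
The bug being fixed: generate_tokens() silently accepted source whose dedent lands on a column that was never an indentation level (an "inconsistent dedent"), even though the compiler rejects such code. Below is a minimal sketch of the new behaviour, not part of the commit itself; it assumes the patched tokenize module from this change and uses Python 2 era syntax (cStringIO), mirroring the added test.

from cStringIO import StringIO
from tokenize import generate_tokens

# "baz" dedents to column 2, but the only indentation levels seen so far
# are 0 and 4, so the dedent matches no outer indentation level.
bad_source = """
def foo():
    bar
  baz
"""

try:
    for tok in generate_tokens(StringIO(bad_source).readline):
        pass
except IndentationError, err:
    # With this patch applied, tokenization stops here instead of
    # silently emitting DEDENT tokens.
    print "caught:", err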
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 2ce435f..d3c1cc4 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1,4 +1,4 @@
-from test.test_support import verbose, findfile, is_resource_enabled
+from test.test_support import verbose, findfile, is_resource_enabled, TestFailed
 import os, glob, random
 from tokenize import (tokenize, generate_tokens, untokenize,
                       NUMBER, NAME, OP, STRING)
@@ -41,6 +41,24 @@ for f in testfiles:
     test_roundtrip(f)
 
+###### Test detecton of IndentationError ######################
+
+from cStringIO import StringIO
+
+sampleBadText = """
+def foo():
+    bar
+  baz
+"""
+
+try:
+    for tok in generate_tokens(StringIO(sampleBadText).readline):
+        pass
+except IndentationError:
+    pass
+else:
+    raise TestFailed("Did not detect IndentationError:")
+
 ###### Test example in the docs ###############################
 
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index b29da6b..2b40e6f 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -271,6 +271,9 @@ def generate_tokens(readline):
                 indents.append(column)
                 yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
             while column < indents[-1]:
+                if column not in indents:
+                    raise IndentationError(
+                        "unindent does not match any outer indentation level")
                 indents = indents[:-1]
                 yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
 
diff --git a/Misc/NEWS b/Misc/NEWS
@@ -147,6 +147,9 @@ Extension Modules
 Library
 -------
 
+- The tokenize module now detects and reports indentation errors.
+  Bug #1224621.
+
 - The tokenize module has a new untokenize() function to support a full
   roundtrip from lexed tokens back to Python sourcecode.  In addition,
   the generate_tokens() function now accepts a callable argument that
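
For readers not familiar with tokenize internals, here is a simplified standalone model of the indent-stack logic the patch tightens; check_dedent is a hypothetical helper, not code from the module. generate_tokens() keeps a stack of indentation columns, and on a dedent it pops entries until the top matches the current column; the added check raises IndentationError as soon as the current column matches no entry on the stack.

def check_dedent(indents, column):
    # `indents` models tokenize's stack of indentation columns (always
    # rooted at 0); `column` is the indentation of the current line.
    while column < indents[-1]:
        if column not in indents:
            # The check added by this commit: the line dedents to a column
            # that was never pushed as an indentation level.
            raise IndentationError(
                "unindent does not match any outer indentation level")
        indents = indents[:-1]   # the real code also yields a DEDENT token here
    return indents

check_dedent([0, 4], 0)          # consistent dedent: pops 4, returns [0]
try:
    check_dedent([0, 4], 2)      # inconsistent: raises IndentationError
except IndentationError:
    pass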