path: root/Lib/tokenize.py
author     Georg Brandl <georg@python.org>   2007-04-21 15:47:16 (GMT)
committer  Georg Brandl <georg@python.org>   2007-04-21 15:47:16 (GMT)
commit     a18af4e7a2091d11478754eb66ae387a85535763 (patch)
tree       fea8015d656cfee937bb6f3d106e6ca0e9f19d78 /Lib/tokenize.py
parent     4d2adcca52ced412d4bdf131b872729c43520d58 (diff)
download   cpython-a18af4e7a2091d11478754eb66ae387a85535763.zip
           cpython-a18af4e7a2091d11478754eb66ae387a85535763.tar.gz
           cpython-a18af4e7a2091d11478754eb66ae387a85535763.tar.bz2
PEP 3114: rename .next() to .__next__() and add next() builtin.
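PEP 3114 renames the iterator protocol method from .next() to .__next__() and adds a next() builtin that calls it. A minimal sketch of the change, using a hypothetical Countdown iterator that is not part of this commit:

    class Countdown:
        def __init__(self, start):
            self.n = start
        def __iter__(self):
            return self
        def __next__(self):          # before PEP 3114 this method was named next()
            if self.n <= 0:
                raise StopIteration
            value = self.n
            self.n -= 1
            return value

    it = Countdown(3)
    print(next(it))                  # the new next() builtin invokes it.__next__(); prints 3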
Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r--  Lib/tokenize.py  4
1 file changed, 2 insertions, 2 deletions
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 8dc4c53..1a72d6f 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -228,7 +228,7 @@ def untokenize(iterable):
# Output text will tokenize the back to the input
t1 = [tok[:2] for tok in generate_tokens(f.readline)]
newcode = untokenize(t1)
- readline = iter(newcode.splitlines(1)).next
+ readline = iter(newcode.splitlines(1)).__next__
t2 = [tok[:2] for tok in generate_tokens(readline)]
assert t1 == t2
"""
@@ -242,7 +242,7 @@ def generate_tokens(readline):
readline() method of built-in file objects. Each call to the function
should return one line of input as a string. Alternately, readline
can be a callable function terminating with StopIteration:
- readline = open(myfile).next # Example of alternate readline
+ readline = open(myfile).__next__ # Example of alternate readline
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and