Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r--  Lib/tokenize.py  10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 42aafe4..22f28c4 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -124,14 +124,14 @@ def tokenize(readline, tokeneater=printtoken):
"""
The tokenize() function accepts two parameters: one representing the
input stream, and one providing an output mechanism for tokenize().
-
+
The first parameter, readline, must be a callable object which provides
the same interface as the readline() method of built-in file objects.
- Each call to the function should return one line of input as a string.
+ Each call to the function should return one line of input as a string.
The second parameter, tokeneater, must also be a callable object. It is
called once for each token, with five arguments, corresponding to the
- tuples generated by generate_tokens().
+ tuples generated by generate_tokens().
"""
try:
tokenize_loop(readline, tokeneater)
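
[Not part of the patch: a minimal sketch of the tokeneater-style call described in the docstring above, assuming the two-argument tokenize() interface shown in this file; the handler name and input file are illustrative only.]

    import tokenize

    def show_token(tok_type, tok_string, start, end, line):
        # Called once per token with the five values named in the docstring:
        # type, string, (srow, scol) start, (erow, ecol) end, and the source line.
        print("%s %r %s-%s" % (tokenize.tok_name[tok_type], tok_string, start, end))

    with open("example.py") as source:   # hypothetical input file
        tokenize.tokenize(source.readline, show_token)
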
@@ -149,13 +149,13 @@ def generate_tokens(readline):
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as a string.
-
+
The generator produces 5-tuples with these members: the token type; the
token string; a 2-tuple (srow, scol) of ints specifying the row and
column where the token begins in the source; a 2-tuple (erow, ecol) of
ints specifying the row and column where the token ends in the source;
and the line on which the token was found. The line passed is the
- logical line; continuation lines are included.
+ logical line; continuation lines are included.
"""
lnum = parenlev = continued = 0
namechars, numchars = string.ascii_letters + '_', '0123456789'
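
[Not part of the patch: a hedged sketch of iterating over generate_tokens(), the generator documented in the second hunk; the input file name is again illustrative.]

    import tokenize

    with open("example.py") as source:   # hypothetical input file
        for tok_type, tok_string, (srow, scol), (erow, ecol), logical_line in \
                tokenize.generate_tokens(source.readline):
            # Each 5-tuple carries the fields described in the docstring.
            print("%d,%d-%d,%d:\t%s\t%r" % (srow, scol, erow, ecol,
                                            tokenize.tok_name[tok_type], tok_string))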