author     Berker Peksag <berker.peksag@gmail.com>    2015-12-29 23:42:43 (GMT)
committer  Berker Peksag <berker.peksag@gmail.com>    2015-12-29 23:42:43 (GMT)
commit     a7161e7facdfa1d6f673beb16a95a647ce764b32 (patch)
tree       2a8040d8748902431e4a185a733f1a7b39e2c3e8 /Lib/tokenize.py
parent     4e6aad1f7ac4e91a5e6c44e8ab3a2105acd65d74 (diff)
parent     ff8d0873aabe54009af533f9f6a76fa91392a80a (diff)
Issue #25977: Fix typos in Lib/tokenize.py
Patch by John Walker.
Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r--    Lib/tokenize.py    10
1 file changed, 5 insertions, 5 deletions
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 2237c3a..7a00358 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -325,8 +325,8 @@ def untokenize(iterable):
     Round-trip invariant for full input:
         Untokenized source will match input source exactly
 
-    Round-trip invariant for limited intput:
-        # Output bytes will tokenize the back to the input
+    Round-trip invariant for limited input:
+        # Output bytes will tokenize back to the input
         t1 = [tok[:2] for tok in tokenize(f.readline)]
         newcode = untokenize(t1)
         readline = BytesIO(newcode).readline
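The "limited input" invariant patched above can be exercised end to end.
A minimal sketch, assuming a small illustrative source snippet (the
source bytes below are not part of the patch):

    import tokenize
    from io import BytesIO

    source = b"x = 1 + 2\n"
    # Keep only (type, string) pairs -- the "limited input" case.
    t1 = [tok[:2] for tok in tokenize.tokenize(BytesIO(source).readline)]
    newcode = tokenize.untokenize(t1)
    # The output bytes tokenize back to the same (type, string) pairs,
    # even though the exact whitespace may differ from the original.
    t2 = [tok[:2] for tok in tokenize.tokenize(BytesIO(newcode).readline)]
    assert t1 == t2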
@@ -462,10 +462,10 @@ def open(filename):
 
 
 def tokenize(readline):
     """
-    The tokenize() generator requires one argment, readline, which
+    The tokenize() generator requires one argument, readline, which
     must be a callable object which provides the same interface as the
     readline() method of built-in file objects. Each call to the function
-    should return one line of input as bytes. Alternately, readline
+    should return one line of input as bytes. Alternatively, readline
     can be a callable function terminating with StopIteration:
         readline = open(myfile, 'rb').__next__  # Example of alternate readline
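A minimal usage sketch of that readline contract, feeding tokenize() from
an in-memory buffer (the source bytes below are illustrative):

    import tokenize
    from io import BytesIO

    buf = BytesIO(b"def f():\n    return 42\n")
    # Each call to buf.readline returns one line of input as bytes.
    for tok in tokenize.tokenize(buf.readline):
        print(tok.type, tok.string)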
@@ -645,7 +645,7 @@ def _tokenize(readline, encoding):
                 #  we switch to longer prefixes, this needs to be
                 #  adjusted.
                 # Note that initial == token[:1].
-                # Also note that single quote checking must come afer
+                # Also note that single quote checking must come after
                 #  triple quote checking (above).
                 elif (initial in single_quoted or
                       token[:2] in single_quoted or
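The ordering matters because a triple quote begins with a character that
is also in the single_quoted set, so the triple-quote branch must be tried
first for a multi-line literal to come back as one STRING token. A small
sketch of the observable behaviour (the source snippet is illustrative):

    import tokenize
    from io import BytesIO

    src = b"s = '''one\ntwo'''\n"
    strings = [t.string for t in tokenize.tokenize(BytesIO(src).readline)
               if t.type == tokenize.STRING]
    # The triple-quoted literal spans two lines but is one STRING token.
    print(strings)   # ["'''one\ntwo'''"]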