path: root/Lib/tokenize.py
author     Raymond Hettinger <python@rcn.com>  2010-09-09 04:32:39 (GMT)
committer  Raymond Hettinger <python@rcn.com>  2010-09-09 04:32:39 (GMT)
commit     6c60d099e5ed97ee0026687c1ec3401cca49c0c2 (patch)
tree       09c77b969f4c41f95583d5c80786208e03d60bfc /Lib/tokenize.py
parent     44d7b6ad60c151d2a265c326c1f7cf26d3c9dc18 (diff)
Improve the repr for the TokenInfo named tuple.
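
For illustration, a minimal sketch of the effect (any short source works; the
numeric value 1 for NAME comes from the stdlib token module):

    import io
    import tokenize

    # With this patch, TokenInfo.__repr__ annotates the numeric token
    # type with its symbolic name from tok_name.
    for tok in tokenize.tokenize(io.BytesIO(b"x = 1\n").readline):
        print(tok)

    # Before: TokenInfo(type=1, string='x', start=(1, 0), end=(1, 1), line='x = 1\n')
    # After:  TokenInfo(type=1 (NAME), string='x', start=(1, 0), end=(1, 1), line='x = 1\n')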
Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r--  Lib/tokenize.py | 29
1 file changed, 28 insertions(+), 1 deletion(-)
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index aa4b246..c97ed8a 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -63,7 +63,9 @@ class TokenInfo(tuple):
         return result

     def __repr__(self):
-        return 'TokenInfo(type=%r, string=%r, start=%r, end=%r, line=%r)' % self
+        typ = self[0]
+        return 'TokenInfo(type=%s, string=%r, start=%r, end=%r, line=%r)' % \
+               ((('%d (%s)' % (typ, tok_name[typ])),) + self[1:])

     def _asdict(self):
         'Return a new dict which maps field names to their values'
@@ -550,3 +552,28 @@ def _tokenize(readline, encoding):
 # library that expect to be able to use tokenize with strings
 def generate_tokens(readline):
     return _tokenize(readline, None)
+
+if __name__ == "__main__":
+    # Quick sanity check
+    s = b'''def parseline(self, line):
+        """Parse the line into a command name and a string containing
+        the arguments. Returns a tuple containing (command, args, line).
+        'command' and 'args' may be None if the line couldn't be parsed.
+        """
+        line = line.strip()
+        if not line:
+            return None, None, line
+        elif line[0] == '?':
+            line = 'help ' + line[1:]
+        elif line[0] == '!':
+            if hasattr(self, 'do_shell'):
+                line = 'shell ' + line[1:]
+            else:
+                return None, None, line
+        i, n = 0, len(line)
+        while i < n and line[i] in self.identchars: i = i+1
+        cmd, arg = line[:i], line[i:].strip()
+        return cmd, arg, line
+    '''
+    for tok in tokenize(iter(s.splitlines()).__next__):
+        print(tok)
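
As a usage note, running the patched file directly (for example,
./python Lib/tokenize.py from a checkout) drives this sanity check: each
token of the embedded parseline source prints with the annotated type, so
the opening keyword appears as something like
TokenInfo(type=1 (NAME), string='def', start=(1, 0), end=(1, 3), line='def parseline(self, line):').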