summary refs log tree commit diff stats
path: root/Lib/tokenize.py
diff options
context:
space:
mode:
author: Raymond Hettinger <python@rcn.com> 2009-04-29 14:21:25 (GMT)
committer: Raymond Hettinger <python@rcn.com> 2009-04-29 14:21:25 (GMT)
commit: aa17a7fc98773e0f2b2a23e59a0a2b3d9f1bca84 (patch)
tree: de985684b8fd28db9ddcab0a869345a03aaa650a /Lib/tokenize.py
parent: df9d4d6c7fa96e0a7a5d5bc3607fb0ff4977bb4d (diff)
download: cpython-aa17a7fc98773e0f2b2a23e59a0a2b3d9f1bca84.zip
cpython-aa17a7fc98773e0f2b2a23e59a0a2b3d9f1bca84.tar.gz
cpython-aa17a7fc98773e0f2b2a23e59a0a2b3d9f1bca84.tar.bz2
Remove dependency on the collections module.
Diffstat (limited to 'Lib/tokenize.py')
-rw-r--r--  Lib/tokenize.py  44
1 files changed, 41 insertions, 3 deletions
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index e9114c6..f83bda5 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -24,7 +24,6 @@ __credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
'Skip Montanaro, Raymond Hettinger, Trent Nelson, '
'Michael Foord')
-import collections
import re, string, sys
from token import *
from codecs import lookup, BOM_UTF8
@@ -32,7 +31,7 @@ cookie_re = re.compile("coding[:=]\s*([-\w.]+)")
import token
__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
- "detect_encoding", "NL", "untokenize", "ENCODING", "Tokenize"]
+ "detect_encoding", "NL", "untokenize", "ENCODING", "TokenInfo"]
del token
COMMENT = N_TOKENS
@@ -43,7 +42,46 @@ ENCODING = N_TOKENS + 2
tok_name[ENCODING] = 'ENCODING'
N_TOKENS += 3
-TokenInfo = collections.namedtuple('TokenInfo', 'type string start end line')
+class TokenInfo(tuple):
+ 'TokenInfo(type, string, start, end, line)'
+
+ __slots__ = ()
+
+ _fields = ('type', 'string', 'start', 'end', 'line')
+
+ def __new__(cls, type, string, start, end, line):
+ return tuple.__new__(cls, (type, string, start, end, line))
+
+ @classmethod
+ def _make(cls, iterable, new=tuple.__new__, len=len):
+ 'Make a new TokenInfo object from a sequence or iterable'
+ result = new(cls, iterable)
+ if len(result) != 5:
+ raise TypeError('Expected 5 arguments, got %d' % len(result))
+ return result
+
+ def __repr__(self):
+ return 'TokenInfo(type=%r, string=%r, start=%r, end=%r, line=%r)' % self
+
+ def _asdict(self):
+ 'Return a new dict which maps field names to their values'
+ return dict(zip(self._fields, self))
+
+ def _replace(self, **kwds):
+ 'Return a new TokenInfo object replacing specified fields with new values'
+ result = self._make(map(kwds.pop, ('type', 'string', 'start', 'end', 'line'), self))
+ if kwds:
+ raise ValueError('Got unexpected field names: %r' % kwds.keys())
+ return result
+
+ def __getnewargs__(self):
+ return tuple(self)
+
+ type = property(lambda t: t[0])
+ string = property(lambda t: t[1])
+ start = property(lambda t: t[2])
+ end = property(lambda t: t[3])
+ line = property(lambda t: t[4])
def group(*choices): return '(' + '|'.join(choices) + ')'
def any(*choices): return group(*choices) + '*'