From b31c7f732aea6abf6ce24d3da7fd67b2172acec9 Mon Sep 17 00:00:00 2001
From: Guido van Rossum
Date: Thu, 11 Nov 1993 10:31:23 +0000
Subject: * test_select.py: (some) tests for built-in select module
 * test_grammar.py, testall.out: added test for funny things in string literals
 * token.py, symbol.py: definitions used with built-in parser module.
 * tokenize.py: added double-quote recognition

---
 Lib/symbol.py            | 60 ++++++++++++++++++++++++++++++++++++++++++++++++
 Lib/test/test_grammar.py | 16 +++++++++++++
 Lib/test/test_select.py  | 25 ++++++++++++++++++++
 Lib/test/testall.out     |  1 +
 Lib/token.py             | 50 ++++++++++++++++++++++++++++++++++++++++
 Lib/tokenize.py          |  4 +++-
 6 files changed, 155 insertions(+), 1 deletion(-)
 create mode 100755 Lib/symbol.py
 create mode 100644 Lib/test/test_select.py
 create mode 100755 Lib/token.py

diff --git a/Lib/symbol.py b/Lib/symbol.py
new file mode 100755
index 0000000..1422f12
--- /dev/null
+++ b/Lib/symbol.py
@@ -0,0 +1,60 @@
+# Non-terminal symbols of Python grammar (from "graminit.h")
+
+single_input = 256
+file_input = 257
+eval_input = 258
+lambda_input = 259
+funcdef = 260
+parameters = 261
+varargslist = 262
+fpdef = 263
+fplist = 264
+stmt = 265
+simple_stmt = 266
+small_stmt = 267
+expr_stmt = 268
+print_stmt = 269
+del_stmt = 270
+pass_stmt = 271
+flow_stmt = 272
+break_stmt = 273
+continue_stmt = 274
+return_stmt = 275
+raise_stmt = 276
+import_stmt = 277
+global_stmt = 278
+access_stmt = 279
+accesstype = 280
+exec_stmt = 281
+compound_stmt = 282
+if_stmt = 283
+while_stmt = 284
+for_stmt = 285
+try_stmt = 286
+except_clause = 287
+suite = 288
+test = 289
+and_test = 290
+not_test = 291
+comparison = 292
+comp_op = 293
+expr = 294
+xor_expr = 295
+and_expr = 296
+shift_expr = 297
+arith_expr = 298
+term = 299
+factor = 300
+atom = 301
+trailer = 302
+subscript = 303
+exprlist = 304
+testlist = 305
+dictmaker = 306
+classdef = 307
+
+names = dir()
+sym_name = {}
+for name in names:
+	number = eval(name)
+	sym_name[number] = name
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 4c9e7b0..742477f 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -62,6 +62,22 @@ x = 3.e14
 x = .3e14
 x = 3.1e4
 
+print '1.1.3 String literals'
+
+def assert(s):
+	if not s: raise TestFailed, 'see traceback'
+
+x = ''; y = ""; assert(len(x) == 0 and x == y)
+x = '\''; y = "'"; assert(len(x) == 1 and x == y and ord(x) == 39)
+x = '"'; y = "\""; assert(len(x) == 1 and x == y and ord(x) == 34)
+x = "doesn't \"shrink\" does it"
+y = 'doesn\'t "shrink" does it'
+assert(len(x) == 24 and x == y)
+x = "doesn \"shrink\" doesn't it"
+y = 'doesn "shrink" doesn\'t it'
+assert(len(x) == 25 and x == y)
+
+
 print '1.2 Grammar'
 
 print 'single_input' # NEWLINE | simple_stmt | compound_stmt NEWLINE
diff --git a/Lib/test/test_select.py b/Lib/test/test_select.py
new file mode 100644
index 0000000..89088ef
--- /dev/null
+++ b/Lib/test/test_select.py
@@ -0,0 +1,25 @@
+# Testing select module
+
+from test_support import *
+
+def test():
+	import select
+	import os
+	cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do date; sleep 3; done'
+	p = os.popen(cmd, 'r')
+	for tout in (0, 1, 2, 4, 8, 16) + (None,)*10:
+		print 'timeout =', tout
+		rfd, wfd, xfd = select.select([p], [], [], tout)
+		print rfd, wfd, xfd
+		if (rfd, wfd, xfd) == ([], [], []):
+			continue
+		if (rfd, wfd, xfd) == ([p], [], []):
+			line = p.readline()
+			print `line`
+			if not line:
+				print 'EOF'
+				break
+			continue
+		print 'Heh?'
+
+test()
diff --git a/Lib/test/testall.out b/Lib/test/testall.out
index 1dbb42a..90c1202 100644
--- a/Lib/test/testall.out
+++ b/Lib/test/testall.out
@@ -6,6 +6,7 @@ test_grammar
 1.1.2.1 Plain integers
 1.1.2.2 Long integers
 1.1.2.3 Floating point
+1.1.3 String literals
 1.2 Grammar
 single_input
 file_input
diff --git a/Lib/token.py b/Lib/token.py
new file mode 100755
index 0000000..75ac39a
--- /dev/null
+++ b/Lib/token.py
@@ -0,0 +1,50 @@
+# Tokens (from "token.h")
+
+ENDMARKER = 0
+NAME = 1
+NUMBER = 2
+STRING = 3
+NEWLINE = 4
+INDENT = 5
+DEDENT = 6
+LPAR = 7
+RPAR = 8
+LSQB = 9
+RSQB = 10
+COLON = 11
+COMMA = 12
+SEMI = 13
+PLUS = 14
+MINUS = 15
+STAR = 16
+SLASH = 17
+VBAR = 18
+AMPER = 19
+LESS = 20
+GREATER = 21
+EQUAL = 22
+DOT = 23
+PERCENT = 24
+BACKQUOTE = 25
+LBRACE = 26
+RBRACE = 27
+EQEQUAL = 28
+NOTEQUAL = 29
+LESSEQUAL = 30
+GREATEREQUAL = 31
+TILDE = 32
+CIRCUMFLEX = 33
+LEFTSHIFT = 34
+RIGHTSHIFT = 35
+OP = 36
+ERRORTOKEN = 37
+
+names = dir()
+tok_name = {}
+for name in names:
+	number = eval(name)
+	tok_name[number] = name
+
+N_TOKENS = 38		# Number of tokens including ERRORTOKEN
+
+NT_OFFSET = 256		# Start of non-terminal symbols
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index bd047f8..8f16115 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -24,7 +24,9 @@ Expfloat = '[0-9]+' + Exponent
 Floatnumber = Pointfloat + '\|' + Expfloat
 Number = Floatnumber + '\|' + Intnumber
 
-String = '\'\(\\\\.\|[^\\\n\']\)*\''
+String = '\'\(\\\\.\|[^\\\n\']\)*\'' + '\|' + '"\(\\\\.\|[^\\\n"]\)*"'
+# Note: this module *recognizes* double quotes, but for backward
+# compatibility, it doesn't *use* them!
 
 Operator = '~\|\+\|-\|\*\|/\|%\|\^\|&\||\|<<\|>>\|==\|<=\|<>\|!=\|>=\|=\|<\|>'
 Bracket = '[][(){}]'
--
cgit v0.12
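
A quick illustration (not part of the patch) of how the reverse-lookup tables built
in the new token.py and symbol.py are meant to be used: the built-in parser module
works with numeric token and grammar-symbol codes, and tok_name / sym_name translate
those codes back into readable names. A minimal interactive sketch, based only on the
definitions added above:

    >>> import token, symbol
    >>> token.NUMBER
    2
    >>> token.tok_name[token.NUMBER]        # reverse lookup: code -> token name
    'NUMBER'
    >>> symbol.funcdef
    260
    >>> symbol.sym_name[symbol.funcdef]     # same idea for grammar non-terminals
    'funcdef'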