author     Guido van Rossum <guido@python.org>   2007-05-07 22:24:25 (GMT)
committer  Guido van Rossum <guido@python.org>   2007-05-07 22:24:25 (GMT)
commit     805365ee39298f93e433e19ae0dd87c6f782145b (patch)
tree       ae8f8a3c315b49cfb2e7926d4b7e56f64c68b21c /Parser
parent     598d98a7e8981e650e803e41e884ffc905b2311e (diff)
Merged revisions 55007-55179 via svnmerge from
svn+ssh://pythondev@svn.python.org/python/branches/p3yk

........
  r55077 | guido.van.rossum | 2007-05-02 11:54:37 -0700 (Wed, 02 May 2007) | 2 lines

  Use the new print syntax, at least.
........
  r55142 | fred.drake | 2007-05-04 21:27:30 -0700 (Fri, 04 May 2007) | 1 line

  remove old cruftiness
........
  r55143 | fred.drake | 2007-05-04 21:52:16 -0700 (Fri, 04 May 2007) | 1 line

  make this work with the new Python
........
  r55162 | neal.norwitz | 2007-05-06 22:29:18 -0700 (Sun, 06 May 2007) | 1 line

  Get asdl code gen working with Python 2.3.  Should continue to work with 3.0.
........
  r55164 | neal.norwitz | 2007-05-07 00:00:38 -0700 (Mon, 07 May 2007) | 1 line

  Verify checkins to p3yk (sic) branch go to 3000 list.
........
  r55166 | neal.norwitz | 2007-05-07 00:12:35 -0700 (Mon, 07 May 2007) | 1 line

  Fix this test so it runs again by importing warnings_test properly.
........
  r55167 | neal.norwitz | 2007-05-07 01:03:22 -0700 (Mon, 07 May 2007) | 8 lines

  So long xrange.  range() now supports values that are outside
  -sys.maxint to sys.maxint.  floats raise a TypeError.

  This has been sitting for a long time.  It probably has some problems and
  needs cleanup.  Objects/rangeobject.c now uses 4-space indents since it is
  almost completely new.
........
  r55171 | guido.van.rossum | 2007-05-07 10:21:26 -0700 (Mon, 07 May 2007) | 4 lines

  Fix two tests that were previously depending on significant spaces
  at the end of a line (and before that on Python 2.x print behavior
  that has no exact equivalent in 3.0).
........
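The two changes most visible in the Parser diff below are the print-statement removal (r55077) and the retirement of xrange (r55167). A minimal sketch of the Python 3.0 side of both, written for illustration only and not taken from the patch:

import sys

# 2.x statement form (a syntax error in 3.0):  print >> sys.stderr, "msg"
# 3.0 function form adopted by these revisions:
print("msg", file=sys.stderr)

# r55167: range() replaces xrange() and is no longer bounded by sys.maxint;
# passing a float raises TypeError.
big = 10**30
assert list(range(big, big + 2)) == [big, big + 1]
try:
    range(1.5)
except TypeError:
    pass  # as the log entry above describes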
Diffstat (limited to 'Parser')
-rw-r--r--   Parser/asdl.py    | 37
-rwxr-xr-x   Parser/asdl_c.py  | 25
-rw-r--r--   Parser/spark.py   | 50
3 files changed, 64 insertions(+), 48 deletions(-)
diff --git a/Parser/asdl.py b/Parser/asdl.py
index 08dc848..b1afd0f 100644
--- a/Parser/asdl.py
+++ b/Parser/asdl.py
@@ -13,10 +13,15 @@ Changes for Python: Add support for module versions
#__metaclass__ = type
import os
+import sys
import traceback
import spark
+def output(string):
+ sys.stdout.write(string + "\n")
+
+
class Token:
# spark seems to dispatch in the parser based on a token's
# type attribute
@@ -45,7 +50,7 @@ class String(Token):
self.value = value
self.lineno = lineno
-class ASDLSyntaxError:
+class ASDLSyntaxError(Exception):
def __init__(self, lineno, token=None, msg=None):
self.lineno = lineno
@@ -128,7 +133,7 @@ class ASDLParser(spark.GenericParser, object):
"version ::= Id String"
if version.value != "version":
raise ASDLSyntaxError(version.lineno,
- msg="expected 'version', found %" % version)
+ msg="expected 'version', found %" % version)
return V
def p_definition_0(self, (definition,)):
@@ -306,9 +311,9 @@ class VisitorBase(object):
return
try:
meth(object, *args)
- except Exception, err:
- print "Error visiting", repr(object)
- print err
+ except Exception:
+ output("Error visiting", repr(object))
+ output(sys.exc_info()[1])
traceback.print_exc()
# XXX hack
if hasattr(self, 'file'):
@@ -353,8 +358,8 @@ class Check(VisitorBase):
if conflict is None:
self.cons[key] = name
else:
- print "Redefinition of constructor %s" % key
- print "Defined in %s and %s" % (conflict, name)
+ output("Redefinition of constructor %s" % key)
+ output("Defined in %s and %s" % (conflict, name))
self.errors += 1
for f in cons.fields:
self.visit(f, key)
@@ -376,7 +381,7 @@ def check(mod):
if t not in mod.types and not t in builtin_types:
v.errors += 1
uses = ", ".join(v.types[t])
- print "Undefined type %s, used in %s" % (t, uses)
+ output("Undefined type %s, used in %s" % (t, uses))
return not v.errors
@@ -388,10 +393,10 @@ def parse(file):
tokens = scanner.tokenize(buf)
try:
return parser.parse(tokens)
- except ASDLSyntaxError, err:
- print err
+ except ASDLSyntaxError:
+ output(sys.exc_info()[1])
lines = buf.split("\n")
- print lines[err.lineno - 1] # lines starts at 0, files at 1
+ output(lines[err.lineno - 1]) # lines starts at 0, files at 1
if __name__ == "__main__":
import glob
@@ -404,12 +409,12 @@ if __name__ == "__main__":
files = glob.glob(testdir + "/*.asdl")
for file in files:
- print file
+ output(file)
mod = parse(file)
- print "module", mod.name
- print len(mod.dfns), "definitions"
+ output("module", mod.name)
+ output(len(mod.dfns), "definitions")
if not check(mod):
- print "Check failed"
+ output("Check failed")
else:
for dfn in mod.dfns:
- print dfn.type
+ output(dfn.type)
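The asdl.py hunks above follow one pattern: funnel all printing through a small output() helper and read the active exception via sys.exc_info(), so the module keeps working on Python 2.3 while also running on 3.0 (the goal stated in r55162). A self-contained sketch of that pattern, with a hypothetical parse_or_report() wrapper added for illustration:

import sys

def output(string):
    # write() is spelled identically in 2.x and 3.x, unlike print
    sys.stdout.write(string + "\n")

def parse_or_report(parser, tokens):
    # Hypothetical wrapper, not part of the patch.
    try:
        return parser.parse(tokens)
    except SyntaxError:
        # sys.exc_info()[1] fetches the current exception without using
        # "except E, err" (2.x-only) or "except E as err" (2.6 and later).
        output("parse failed: %s" % sys.exc_info()[1])
        return None

Since output() takes a single string, multi-argument call sites such as output("module", mod.name) in the hunk above would still need to do their own formatting.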
diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py
index 325a2b6..8c33e9d 100755
--- a/Parser/asdl_c.py
+++ b/Parser/asdl_c.py
@@ -718,7 +718,7 @@ class ChainOfVisitors:
v.visit(object)
v.emit("", 0)
-common_msg = "/* File automatically generated by %s. */\n"
+common_msg = "/* File automatically generated by %s. */\n\n"
c_file_msg = """
/*
@@ -728,6 +728,7 @@ c_file_msg = """
The __version__ number is set to the revision number of the commit
containing the grammar change.
*/
+
"""
def main(srcfile):
@@ -741,25 +742,25 @@ def main(srcfile):
if INC_DIR:
p = "%s/%s-ast.h" % (INC_DIR, mod.name)
f = open(p, "wb")
- print >> f, auto_gen_msg
- print >> f, '#include "asdl.h"\n'
+ f.write(auto_gen_msg)
+ f.write('#include "asdl.h"\n\n')
c = ChainOfVisitors(TypeDefVisitor(f),
StructVisitor(f),
PrototypeVisitor(f),
)
c.visit(mod)
- print >>f, "PyObject* PyAST_mod2obj(mod_ty t);"
+ f.write("PyObject* PyAST_mod2obj(mod_ty t);\n")
f.close()
if SRC_DIR:
p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c")
f = open(p, "wb")
- print >> f, auto_gen_msg
- print >> f, c_file_msg % parse_version(mod)
- print >> f, '#include "Python.h"'
- print >> f, '#include "%s-ast.h"' % mod.name
- print >> f
- print >>f, "static PyTypeObject* AST_type;"
+ f.write(auto_gen_msg)
+ f.write(c_file_msg % parse_version(mod))
+ f.write('#include "Python.h"\n')
+ f.write('#include "%s-ast.h"\n' % mod.name)
+ f.write('\n')
+ f.write("static PyTypeObject* AST_type;\n")
v = ChainOfVisitors(
PyTypesDeclareVisitor(f),
PyTypesVisitor(f),
@@ -779,7 +780,7 @@ if __name__ == "__main__":
SRC_DIR = ''
opts, args = getopt.getopt(sys.argv[1:], "h:c:")
if len(opts) != 1:
- print "Must specify exactly one output file"
+ sys.stdout.write("Must specify exactly one output file\n")
sys.exit(1)
for o, v in opts:
if o == '-h':
@@ -787,6 +788,6 @@ if __name__ == "__main__":
if o == '-c':
SRC_DIR = v
if len(args) != 1:
- print "Must specify single input file"
+ sys.stdout.write("Must specify single input file\n")
sys.exit(1)
main(args[0])
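In asdl_c.py the 2.x-only "print >> f, text" idiom becomes explicit f.write() calls, so every newline now has to be written out by hand (which is why common_msg gains a second trailing "\n"). A minimal illustration with a hypothetical write_header() helper; the path and strings are placeholders:

def write_header(path, auto_gen_msg):
    f = open(path, "w")
    try:
        # print >> f appended a newline for you; write() does not, so the
        # trailing "\n" is spelled out in each string.
        f.write(auto_gen_msg)
        f.write('#include "asdl.h"\n\n')
        f.write("PyObject* PyAST_mod2obj(mod_ty t);\n")
    finally:
        f.close()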
diff --git a/Parser/spark.py b/Parser/spark.py
index 0b3292f..7035077 100644
--- a/Parser/spark.py
+++ b/Parser/spark.py
@@ -23,7 +23,18 @@ __version__ = 'SPARK-0.7 (pre-alpha-5)'
import re
import sys
-import string
+
+# Compatability with older pythons.
+def output(string='', end='\n'):
+ sys.stdout.write(string + end)
+
+try:
+ sorted
+except NameError:
+ def sorted(seq):
+ seq2 = seq[:]
+ seq2.sort()
+ return seq2
def _namelist(instance):
namelist, namedict, classlist = [], {}, [instance.__class__]
@@ -57,10 +68,10 @@ class GenericScanner:
rv.append(self.makeRE(name))
rv.append(self.makeRE('t_default'))
- return string.join(rv, '|')
+ return '|'.join(rv)
def error(self, s, pos):
- print "Lexical error at position %s" % pos
+ output("Lexical error at position %s" % pos)
raise SystemExit
def tokenize(self, s):
@@ -79,7 +90,7 @@ class GenericScanner:
def t_default(self, s):
r'( . | \n )+'
- print "Specification error: unmatched input"
+ output("Specification error: unmatched input")
raise SystemExit
#
@@ -172,7 +183,7 @@ class GenericParser:
def addRule(self, doc, func, _preprocess=1):
fn = func
- rules = string.split(doc)
+ rules = doc.split()
index = []
for i in range(len(rules)):
@@ -296,7 +307,7 @@ class GenericParser:
return None
def error(self, token):
- print "Syntax error at or near `%s' token" % token
+ output("Syntax error at or near `%s' token" % token)
raise SystemExit
def parse(self, tokens):
@@ -313,7 +324,7 @@ class GenericParser:
self.states = { 0: self.makeState0() }
self.makeState(0, self._BOF)
- for i in xrange(len(tokens)):
+ for i in range(len(tokens)):
sets.append([])
if sets[i] == []:
@@ -419,8 +430,7 @@ class GenericParser:
# need to know the entire set of predicted nonterminals
# to do this without accidentally duplicating states.
#
- core = predicted.keys()
- core.sort()
+ core = sorted(predicted.keys())
tcore = tuple(core)
if tcore in self.cores:
self.edges[(k, None)] = self.cores[tcore]
@@ -605,7 +615,7 @@ class GenericParser:
rule = self.ambiguity(self.newrules[nt])
else:
rule = self.newrules[nt][0]
- #print rule
+ #output(rule)
rhs = rule[1]
attr = [None] * len(rhs)
@@ -624,7 +634,7 @@ class GenericParser:
rule = choices[0]
if len(choices) > 1:
rule = self.ambiguity(choices)
- #print rule
+ #output(rule)
rhs = rule[1]
attr = [None] * len(rhs)
@@ -826,15 +836,15 @@ class GenericASTMatcher(GenericParser):
def _dump(tokens, sets, states):
for i in range(len(sets)):
- print 'set', i
+ output('set %d' % i)
for item in sets[i]:
- print '\t', item
+ output('\t', item)
for (lhs, rhs), pos in states[item[0]].items:
- print '\t\t', lhs, '::=',
- print string.join(rhs[:pos]),
- print '.',
- print string.join(rhs[pos:])
+ output('\t\t', lhs, '::=', end='')
+ output(' '.join(rhs[:pos]), end='')
+ output('.', end='')
+ output(' '.join(rhs[pos:]))
if i < len(tokens):
- print
- print 'token', str(tokens[i])
- print
+ output()
+ output('token %s' % str(tokens[i]))
+ output()
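The spark.py hunks drop the last uses of the string module, xrange(), and in-place keys()/sort() ordering, again so the file runs on both old and new interpreters. A condensed sketch of the substitutions; the literals are illustrative, not taken from the module:

import sys

def output(string='', end='\n'):
    sys.stdout.write(string + end)

try:
    sorted                       # a builtin since Python 2.4
except NameError:
    def sorted(seq):             # fallback for Python 2.3
        seq2 = seq[:]
        seq2.sort()
        return seq2

rules = "expr ::= expr + term".split()    # was string.split(doc)
pattern = '|'.join(['t_a', 't_b'])        # was string.join(rv, '|')
core = sorted(['VP', 'NP'])               # was core = keys(); core.sort()
for i in range(len(rules)):               # was xrange(...)
    output(rules[i])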