[Python-3000-checkins] r55162 - python/branches/p3yk/Parser/asdl.py python/branches/p3yk/Parser/asdl_c.py python/branches/p3yk/Parser/spark.py
neal.norwitz
python-3000-checkins at python.org
Mon May 7 07:29:19 CEST 2007
Author: neal.norwitz
Date: Mon May 7 07:29:18 2007
New Revision: 55162
Modified:
python/branches/p3yk/Parser/asdl.py
python/branches/p3yk/Parser/asdl_c.py
python/branches/p3yk/Parser/spark.py
Log:
Get asdl code gen working with Python 2.3. Should continue to work with 3.0
Modified: python/branches/p3yk/Parser/asdl.py
==============================================================================
--- python/branches/p3yk/Parser/asdl.py (original)
+++ python/branches/p3yk/Parser/asdl.py Mon May 7 07:29:18 2007
@@ -13,10 +13,15 @@
#__metaclass__ = type
import os
+import sys
import traceback
import spark
+def output(string):
+ sys.stdout.write(string + "\n")
+
+
class Token:
# spark seems to dispatch in the parser based on a token's
# type attribute
@@ -306,9 +311,9 @@
return
try:
meth(object, *args)
- except Exception as err:
- print("Error visiting", repr(object))
- print(err)
+ except Exception:
+ output("Error visiting", repr(object))
+ output(sys.exc_info()[1])
traceback.print_exc()
# XXX hack
if hasattr(self, 'file'):
@@ -353,8 +358,8 @@
if conflict is None:
self.cons[key] = name
else:
- print("Redefinition of constructor %s" % key)
- print("Defined in %s and %s" % (conflict, name))
+ output("Redefinition of constructor %s" % key)
+ output("Defined in %s and %s" % (conflict, name))
self.errors += 1
for f in cons.fields:
self.visit(f, key)
@@ -376,7 +381,7 @@
if t not in mod.types and not t in builtin_types:
v.errors += 1
uses = ", ".join(v.types[t])
- print("Undefined type %s, used in %s" % (t, uses))
+ output("Undefined type %s, used in %s" % (t, uses))
return not v.errors
@@ -388,10 +393,10 @@
tokens = scanner.tokenize(buf)
try:
return parser.parse(tokens)
- except ASDLSyntaxError as err:
- print(err)
+ except ASDLSyntaxError:
+ output(sys.exc_info()[1])
lines = buf.split("\n")
- print(lines[err.lineno - 1]) # lines starts at 0, files at 1
+ output(lines[err.lineno - 1]) # lines starts at 0, files at 1
if __name__ == "__main__":
import glob
@@ -404,12 +409,12 @@
files = glob.glob(testdir + "/*.asdl")
for file in files:
- print(file)
+ output(file)
mod = parse(file)
- print("module", mod.name)
- print(len(mod.dfns), "definitions")
+ output("module", mod.name)
+ output(len(mod.dfns), "definitions")
if not check(mod):
- print("Check failed")
+ output("Check failed")
else:
for dfn in mod.dfns:
- print(dfn.type)
+ output(dfn.type)
Modified: python/branches/p3yk/Parser/asdl_c.py
==============================================================================
--- python/branches/p3yk/Parser/asdl_c.py (original)
+++ python/branches/p3yk/Parser/asdl_c.py Mon May 7 07:29:18 2007
@@ -718,7 +718,7 @@
v.visit(object)
v.emit("", 0)
-common_msg = "/* File automatically generated by %s. */\n"
+common_msg = "/* File automatically generated by %s. */\n\n"
c_file_msg = """
/*
@@ -728,6 +728,7 @@
The __version__ number is set to the revision number of the commit
containing the grammar change.
*/
+
"""
def main(srcfile):
@@ -741,25 +742,25 @@
if INC_DIR:
p = "%s/%s-ast.h" % (INC_DIR, mod.name)
f = open(p, "wb")
- print(auto_gen_msg, file=f)
- print('#include "asdl.h"\n', file=f)
+ f.write(auto_gen_msg)
+ f.write('#include "asdl.h"\n\n')
c = ChainOfVisitors(TypeDefVisitor(f),
StructVisitor(f),
PrototypeVisitor(f),
)
c.visit(mod)
- print("PyObject* PyAST_mod2obj(mod_ty t);", file=f)
+ f.write("PyObject* PyAST_mod2obj(mod_ty t);\n")
f.close()
if SRC_DIR:
p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c")
f = open(p, "wb")
- print(auto_gen_msg, file=f)
- print(c_file_msg % parse_version(mod), file=f)
- print('#include "Python.h"', file=f)
- print('#include "%s-ast.h"' % mod.name, file=f)
- print(file=f)
- print("static PyTypeObject* AST_type;", file=f)
+ f.write(auto_gen_msg)
+ f.write(c_file_msg % parse_version(mod))
+ f.write('#include "Python.h"\n')
+ f.write('#include "%s-ast.h"\n' % mod.name)
+ f.write('\n')
+ f.write("static PyTypeObject* AST_type;\n")
v = ChainOfVisitors(
PyTypesDeclareVisitor(f),
PyTypesVisitor(f),
@@ -779,7 +780,7 @@
SRC_DIR = ''
opts, args = getopt.getopt(sys.argv[1:], "h:c:")
if len(opts) != 1:
- print("Must specify exactly one output file")
+ sys.stdout.write("Must specify exactly one output file\n")
sys.exit(1)
for o, v in opts:
if o == '-h':
@@ -787,6 +788,6 @@
if o == '-c':
SRC_DIR = v
if len(args) != 1:
- print("Must specify single input file")
+ sys.stdout.write("Must specify single input file\n")
sys.exit(1)
main(args[0])
Modified: python/branches/p3yk/Parser/spark.py
==============================================================================
--- python/branches/p3yk/Parser/spark.py (original)
+++ python/branches/p3yk/Parser/spark.py Mon May 7 07:29:18 2007
@@ -22,6 +22,19 @@
__version__ = 'SPARK-0.7 (pre-alpha-5)'
import re
+import sys
+
+# Compatibility with older pythons.
+def output(string='', end='\n'):
+ sys.stdout.write(string + end)
+
+try:
+ sorted
+except NameError:
+ def sorted(seq):
+ seq2 = seq[:]
+ seq2.sort()
+ return seq2
def _namelist(instance):
namelist, namedict, classlist = [], {}, [instance.__class__]
@@ -58,7 +71,7 @@
return '|'.join(rv)
def error(self, s, pos):
- print("Lexical error at position %s" % pos)
+ output("Lexical error at position %s" % pos)
raise SystemExit
def tokenize(self, s):
@@ -77,7 +90,7 @@
def t_default(self, s):
r'( . | \n )+'
- print("Specification error: unmatched input")
+ output("Specification error: unmatched input")
raise SystemExit
#
@@ -294,7 +307,7 @@
return None
def error(self, token):
- print("Syntax error at or near `%s' token" % token)
+ output("Syntax error at or near `%s' token" % token)
raise SystemExit
def parse(self, tokens):
@@ -602,7 +615,7 @@
rule = self.ambiguity(self.newrules[nt])
else:
rule = self.newrules[nt][0]
- #print(rule)
+ #output(rule)
rhs = rule[1]
attr = [None] * len(rhs)
@@ -621,7 +634,7 @@
rule = choices[0]
if len(choices) > 1:
rule = self.ambiguity(choices)
- #print(rule)
+ #output(rule)
rhs = rule[1]
attr = [None] * len(rhs)
@@ -823,15 +836,15 @@
def _dump(tokens, sets, states):
for i in range(len(sets)):
- print('set', i)
+ output('set %d' % i)
for item in sets[i]:
- print('\t', item)
+ output('\t', item)
for (lhs, rhs), pos in states[item[0]].items:
- print('\t\t', lhs, '::=', end='')
- print(' '.join(rhs[:pos]), end='')
- print('.', end='')
- print(' '.join(rhs[pos:]))
+ output('\t\t', lhs, '::=', end='')
+ output(' '.join(rhs[:pos]), end='')
+ output('.', end='')
+ output(' '.join(rhs[pos:]))
if i < len(tokens):
- print()
- print('token', str(tokens[i]))
- print()
+ output()
+ output('token %s' % str(tokens[i]))
+ output()
More information about the Python-3000-checkins
mailing list